diff --git a/.github/dependabot.yml b/.github/dependabot.yml index c4d4429d..05669038 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -20,18 +20,6 @@ updates: directory: "/packages/website" schedule: interval: weekly - - package-ecosystem: docker - directory: "/docker/accounts" - schedule: - interval: weekly - - package-ecosystem: docker - directory: "/docker/caddy" - schedule: - interval: weekly - - package-ecosystem: docker - directory: "/docker/handshake" - schedule: - interval: weekly - package-ecosystem: docker directory: "/docker/nginx" schedule: diff --git a/.github/workflows/deploy-dashboard-v2-storybook.yml b/.github/workflows/deploy-dashboard-v2-storybook.yml new file mode 100644 index 00000000..b4591b59 --- /dev/null +++ b/.github/workflows/deploy-dashboard-v2-storybook.yml @@ -0,0 +1,31 @@ +name: Build Storybook - packages/dashboard-v2 + +on: + push: + branches: + - master + paths: + - "packages/dashboard-v2/**" + pull_request: + paths: + - "packages/dashboard-v2/**" + +defaults: + run: + working-directory: packages/dashboard-v2 + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + node-version: 16.x + - run: yarn install + - run: yarn build-storybook + - name: "Deploy to Skynet" + uses: skynetlabs/deploy-to-skynet-action@v2 + with: + upload-dir: packages/dashboard-v2/storybook-build + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/deploy-website.yml b/.github/workflows/deploy-website.yml index dc6058ac..143675e2 100644 --- a/.github/workflows/deploy-website.yml +++ b/.github/workflows/deploy-website.yml @@ -36,8 +36,10 @@ jobs: working-directory: packages/website install: false record: true - start: yarn serve - wait-on: "http://127.0.0.1:9000" + start: yarn develop + wait-on: http://localhost:8000 + wait-on-timeout: 120 + config: baseUrl=http://localhost:8000 - name: "Deploy to Skynet" uses: skynetlabs/deploy-to-skynet-action@v2 diff --git a/.github/workflows/lint-dockerfiles.yml b/.github/workflows/lint-dockerfiles.yml new file mode 100644 index 00000000..467dc117 --- /dev/null +++ b/.github/workflows/lint-dockerfiles.yml @@ -0,0 +1,28 @@ +name: Dockerfile Lint + +on: + push: + branches: + - master + pull_request: + +jobs: + hadolint: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + dockerfile: + - docker/nginx/Dockerfile + - docker/sia/Dockerfile + - packages/dashboard/Dockerfile + - packages/dashboard-v2/Dockerfile + - packages/dnslink-api/Dockerfile + - packages/handshake-api/Dockerfile + - packages/health-check/Dockerfile + - packages/website/Dockerfile + steps: + - uses: actions/checkout@v3 + - uses: hadolint/hadolint-action@v2.0.0 + with: + dockerfile: ${{ matrix.dockerfile }} diff --git a/.github/workflows/lint-packages-dashboard-v2.yml b/.github/workflows/lint-packages-dashboard-v2.yml new file mode 100644 index 00000000..a8577562 --- /dev/null +++ b/.github/workflows/lint-packages-dashboard-v2.yml @@ -0,0 +1,24 @@ +name: Lint - packages/dashboard-v2 + +on: + pull_request: + paths: + - packages/dashboard-v2/** + +defaults: + run: + working-directory: packages/dashboard-v2 + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + node-version: 16.x + + - run: yarn + - run: yarn prettier --check + - run: yarn lint diff --git a/.github/workflows/nginx-lua-unit-tests.yml b/.github/workflows/nginx-lua-unit-tests.yml index 514459fa..b86a4e04 100644 --- 
a/.github/workflows/nginx-lua-unit-tests.yml +++ b/.github/workflows/nginx-lua-unit-tests.yml @@ -4,8 +4,15 @@ name: Nginx Lua Unit Tests on: + push: + branches: + - "master" + paths: + - ".github/workflows/nginx-lua-unit-tests.yml" + - "docker/nginx/libs/**.lua" pull_request: paths: + - ".github/workflows/nginx-lua-unit-tests.yml" - "docker/nginx/libs/**.lua" jobs: @@ -25,9 +32,22 @@ jobs: hererocks env --lua=5.1 -rlatest source env/bin/activate luarocks install busted + luarocks install luacov luarocks install hasher + luarocks install luacheck + + - name: Lint code + run: | + source env/bin/activate + luacheck docker/nginx/libs --std ngx_lua+busted - name: Unit Tests run: | source env/bin/activate - busted --verbose --pattern=spec --directory=docker/nginx/libs . + busted --verbose --coverage --pattern=spec --directory=docker/nginx/libs . + cd docker/nginx/libs && luacov + + - uses: codecov/codecov-action@v2 + with: + directory: docker/nginx/libs + flags: nginx-lua diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f5dda75..9c1d4ba9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,46 @@ Version History Latest: +## Mar 8, 2022: +### v0.1.4 +**Key Updates** +- Expose generic skylink serving endpoint on domain aliases +- Add abuse scanner service, activated by adding `u` to `PORTAL_MODULES` +- Add malware scanner service, activated by adding `s` to `PORTAL_MODULES` +- Remove ORY Kratos, ORY Oathkeeper, CockroachDB. +- Add `/serverload` endpoint for CPU usage and free disk space + +**Bugs Fixed** +- Add missing servers and blocklist command to the manual blocklist script. +- Fix a bug when accessing a file from a skylink via subdomain with a filename that had escaped characters +- Fix `blocklist-skylink.sh` script that didn't remove blocked skylinks from + nginx cache. +- Fix uploaded directory name (was "undefined" before) +- Fix empty directory upload progress (size was not calculated for directories) + +**Other** +- Add new critical health check that scans the config and makes sure that all relevant configurations are set +- Add abuse report configuration +- Remove hardcoded Airtable default values from blocklist script. Portal + operators need to define their own values in portal common config (LastPass). +- Add health check for the blocker container +- Drop `Skynet-Requested-Skylink` header +- Dump disk space usage when health-checker script disables portal due to + critical free disk space. +- Enable the accounting module for skyd +- Add link to supported setup process in Gitbook. +- Set `min_free` parameter on the `proxy_cache_path` directive to `100g` +- Parameterize MongoDB replicaset in `docker-compose.mongodb.yml` via + `SKYNET_DB_REPLICASET` from `.env` file. +- Hot reload Nginx after pruning cache files. +- Added script to prune nginx cache. +- Remove hardcoded server list from `blocklist-skylink.sh` so it removes server + list duplication and can also be called from Ansible. +- Remove outdated portal setup documentation and point to developer docs. +- Block skylinks in batches to improve performance. +- Add trimming Airtable skylinks from Takedown Request table. 
+- Update handshake to use v3.0.1 + ## Oct 18, 2021: ### v0.1.3 **Key Updates** diff --git a/changelog/changelog-tail.md b/changelog/changelog-tail.md index c42922df..fdb02440 100644 --- a/changelog/changelog-tail.md +++ b/changelog/changelog-tail.md @@ -1,3 +1,43 @@ +## Mar 8, 2022: +### v0.1.4 +**Key Updates** +- Expose generic skylink serving endpoint on domain aliases +- Add abuse scanner service, activated by adding `u` to `PORTAL_MODULES` +- Add malware scanner service, activated by adding `s` to `PORTAL_MODULES` +- Remove ORY Kratos, ORY Oathkeeper, CockroachDB. +- Add `/serverload` endpoint for CPU usage and free disk space + +**Bugs Fixed** +- Add missing servers and blocklist command to the manual blocklist script. +- Fix a bug when accessing a file from a skylink via subdomain with a filename that had escaped characters +- Fix `blocklist-skylink.sh` script that didn't remove blocked skylinks from + nginx cache. +- Fix uploaded directory name (was "undefined" before) +- Fix empty directory upload progress (size was not calculated for directories) + +**Other** +- Add new critical health check that scans the config and makes sure that all relevant configurations are set +- Add abuse report configuration +- Remove hardcoded Airtable default values from blocklist script. Portal + operators need to define their own values in portal common config (LastPass). +- Add health check for the blocker container +- Drop `Skynet-Requested-Skylink` header +- Dump disk space usage when health-checker script disables portal due to + critical free disk space. +- Enable the accounting module for skyd +- Add link to supported setup process in Gitbook. +- Set `min_free` parameter on the `proxy_cache_path` directive to `100g` +- Parameterize MongoDB replicaset in `docker-compose.mongodb.yml` via + `SKYNET_DB_REPLICASET` from `.env` file. +- Hot reload Nginx after pruning cache files. +- Added script to prune nginx cache. +- Remove hardcoded server list from `blocklist-skylink.sh` so it removes server + list duplication and can also be called from Ansible. +- Remove outdated portal setup documentation and point to developer docs. +- Block skylinks in batches to improve performance. +- Add trimming Airtable skylinks from Takedown Request table. +- Update handshake to use v3.0.1 + ## Oct 18, 2021: ### v0.1.3 **Key Updates** diff --git a/changelog/items/bugs-fixed/add-missing-blocklist-cmd.md b/changelog/items/bugs-fixed/add-missing-blocklist-cmd.md deleted file mode 100644 index 3d72b402..00000000 --- a/changelog/items/bugs-fixed/add-missing-blocklist-cmd.md +++ /dev/null @@ -1 +0,0 @@ -- Add missing servers and blocklist command to the manual blocklist script. diff --git a/changelog/items/bugs-fixed/escape-uri-on-subdomain-skylink-requests.md b/changelog/items/bugs-fixed/escape-uri-on-subdomain-skylink-requests.md deleted file mode 100644 index 3beabc7e..00000000 --- a/changelog/items/bugs-fixed/escape-uri-on-subdomain-skylink-requests.md +++ /dev/null @@ -1 +0,0 @@ -- fixed a bug when accessing file from skylink via subdomain with a filename that had escaped characters diff --git a/changelog/items/bugs-fixed/fix-blocklist-skylink.md b/changelog/items/bugs-fixed/fix-blocklist-skylink.md deleted file mode 100644 index b7b6d38d..00000000 --- a/changelog/items/bugs-fixed/fix-blocklist-skylink.md +++ /dev/null @@ -1,2 +0,0 @@ -- Fix `blocklist-skylink.sh` script that didn't removed blocked skylink from - nginx cache. 
\ No newline at end of file diff --git a/changelog/items/bugs-fixed/undefined-dir-upload-name-and-empty-progress.md b/changelog/items/bugs-fixed/undefined-dir-upload-name-and-empty-progress.md deleted file mode 100644 index a6112e27..00000000 --- a/changelog/items/bugs-fixed/undefined-dir-upload-name-and-empty-progress.md +++ /dev/null @@ -1,2 +0,0 @@ -- fixed uploaded directory name (was "undefined" before) -- fixed empty directory upload progress (size was not calculated for directories) diff --git a/changelog/items/key-updates/1223-wildcard-api.md b/changelog/items/key-updates/1223-wildcard-api.md deleted file mode 100644 index 88ca91d9..00000000 --- a/changelog/items/key-updates/1223-wildcard-api.md +++ /dev/null @@ -1 +0,0 @@ -- expose generic skylink serving endpoint on domain aliases diff --git a/changelog/items/key-updates/abuse-scanner.md b/changelog/items/key-updates/abuse-scanner.md deleted file mode 100644 index 54788f19..00000000 --- a/changelog/items/key-updates/abuse-scanner.md +++ /dev/null @@ -1 +0,0 @@ -- Add abuse scanner service, activated by adding `u` to `PORTAL_MODULES` diff --git a/changelog/items/key-updates/malware-scanner.md b/changelog/items/key-updates/malware-scanner.md deleted file mode 100644 index 2559b033..00000000 --- a/changelog/items/key-updates/malware-scanner.md +++ /dev/null @@ -1 +0,0 @@ -- Add malware scanner service, activated by adding `s` to `PORTAL_MODULES` diff --git a/changelog/items/key-updates/remove-kratos.md b/changelog/items/key-updates/remove-kratos.md deleted file mode 100644 index 23a4ac44..00000000 --- a/changelog/items/key-updates/remove-kratos.md +++ /dev/null @@ -1 +0,0 @@ -- Remove ORY Kratos, ORY Oathkeeper, CockroachDB. diff --git a/changelog/items/key-updates/serverload.md b/changelog/items/key-updates/serverload.md deleted file mode 100644 index c626b753..00000000 --- a/changelog/items/key-updates/serverload.md +++ /dev/null @@ -1 +0,0 @@ -- Add `/serverload` endpoint for CPU usage and free disk space diff --git a/changelog/items/other/1332-skyd-config-health-check.md b/changelog/items/other/1332-skyd-config-health-check.md deleted file mode 100644 index 534c7590..00000000 --- a/changelog/items/other/1332-skyd-config-health-check.md +++ /dev/null @@ -1 +0,0 @@ -- add new critical health check that scans config and makes sure that all relevant configurations are set diff --git a/changelog/items/other/add-abuse-config.md b/changelog/items/other/add-abuse-config.md deleted file mode 100644 index 51a55918..00000000 --- a/changelog/items/other/add-abuse-config.md +++ /dev/null @@ -1 +0,0 @@ -- Add abuse report configuration diff --git a/changelog/items/other/airtable-env-vars.md b/changelog/items/other/airtable-env-vars.md deleted file mode 100644 index dc287984..00000000 --- a/changelog/items/other/airtable-env-vars.md +++ /dev/null @@ -1,2 +0,0 @@ -- Remove hardcoded Airtable default values from blocklist script. Portal - operators need to define their own values in portal common config (LastPass). 
\ No newline at end of file diff --git a/changelog/items/other/blocker-health-check.md b/changelog/items/other/blocker-health-check.md deleted file mode 100644 index 2e45834c..00000000 --- a/changelog/items/other/blocker-health-check.md +++ /dev/null @@ -1 +0,0 @@ -- Add health check for the blocker container diff --git a/changelog/items/other/drop-requested-skylink-header.md b/changelog/items/other/drop-requested-skylink-header.md deleted file mode 100644 index bcf702dc..00000000 --- a/changelog/items/other/drop-requested-skylink-header.md +++ /dev/null @@ -1 +0,0 @@ -- Drop `Skynet-Requested-Skylink` header diff --git a/changelog/items/other/dump-disk-space-usage.md b/changelog/items/other/dump-disk-space-usage.md deleted file mode 100644 index 3c1c3147..00000000 --- a/changelog/items/other/dump-disk-space-usage.md +++ /dev/null @@ -1,2 +0,0 @@ -- Dump disk space usage when health-checker script disables portal due to - critical free disk space. \ No newline at end of file diff --git a/changelog/items/other/enable-accounting.md b/changelog/items/other/enable-accounting.md deleted file mode 100644 index 4771bbe0..00000000 --- a/changelog/items/other/enable-accounting.md +++ /dev/null @@ -1 +0,0 @@ -- Enable the accounting module for skyd diff --git a/changelog/items/other/gitbook-link.md b/changelog/items/other/gitbook-link.md deleted file mode 100644 index 686e8644..00000000 --- a/changelog/items/other/gitbook-link.md +++ /dev/null @@ -1 +0,0 @@ -- Add link to supported setup process in Gitbook. \ No newline at end of file diff --git a/changelog/items/other/min-free-param.md b/changelog/items/other/min-free-param.md deleted file mode 100644 index 0a6239f3..00000000 --- a/changelog/items/other/min-free-param.md +++ /dev/null @@ -1 +0,0 @@ -- Set `min_free` parameter on the `proxy_cache_path` directive to `100g` diff --git a/changelog/items/other/mongo-replicaset-env.md b/changelog/items/other/mongo-replicaset-env.md deleted file mode 100644 index 0dcbb402..00000000 --- a/changelog/items/other/mongo-replicaset-env.md +++ /dev/null @@ -1,2 +0,0 @@ -- Parameterize MongoDB replicaset in `docker-compose.mongodb.yml` via - `SKYNET_DB_REPLICASET` from `.env` file. \ No newline at end of file diff --git a/changelog/items/other/nginx-prune-hot-reload.md b/changelog/items/other/nginx-prune-hot-reload.md deleted file mode 100644 index c0780778..00000000 --- a/changelog/items/other/nginx-prune-hot-reload.md +++ /dev/null @@ -1 +0,0 @@ -- Hot reload Nginx after pruning cache files. \ No newline at end of file diff --git a/changelog/items/other/nginx-prune.md b/changelog/items/other/nginx-prune.md deleted file mode 100644 index 42581090..00000000 --- a/changelog/items/other/nginx-prune.md +++ /dev/null @@ -1 +0,0 @@ -- Added script to prune nginx cache. \ No newline at end of file diff --git a/changelog/items/other/refactor-blocklist.md b/changelog/items/other/refactor-blocklist.md deleted file mode 100644 index 28629dab..00000000 --- a/changelog/items/other/refactor-blocklist.md +++ /dev/null @@ -1,2 +0,0 @@ -- Remove hardcoded server list from `blocklist-skylink.sh` so it removes server - list duplication and can also be called from Ansible. 
\ No newline at end of file diff --git a/changelog/items/other/remove-outdated-documentation.md b/changelog/items/other/remove-outdated-documentation.md deleted file mode 100644 index 0f221327..00000000 --- a/changelog/items/other/remove-outdated-documentation.md +++ /dev/null @@ -1 +0,0 @@ -- Remove outdated portal setup documentation and point to developer docs. diff --git a/changelog/items/other/skylinks-block-batch.md b/changelog/items/other/skylinks-block-batch.md deleted file mode 100644 index e617082b..00000000 --- a/changelog/items/other/skylinks-block-batch.md +++ /dev/null @@ -1 +0,0 @@ -- Block skylinks in batches to improve performance. \ No newline at end of file diff --git a/changelog/items/other/trim-airtable-skylinks.md b/changelog/items/other/trim-airtable-skylinks.md deleted file mode 100644 index a63a90d0..00000000 --- a/changelog/items/other/trim-airtable-skylinks.md +++ /dev/null @@ -1 +0,0 @@ -- Add trimming Airtable skylinks from Takedown Request table. \ No newline at end of file diff --git a/changelog/items/other/update-handshake.md b/changelog/items/other/update-handshake.md deleted file mode 100644 index f9296fa4..00000000 --- a/changelog/items/other/update-handshake.md +++ /dev/null @@ -1 +0,0 @@ -- Update handshake to use v3.0.1 diff --git a/dc b/dc index b3e68df4..d54bbf2d 100755 --- a/dc +++ b/dc @@ -41,9 +41,9 @@ for i in $(seq 1 ${#PORTAL_MODULES}); do COMPOSE_FILES+=" -f docker-compose.mongodb.yml" fi - # abuse module - alias "u" + # abuse-scanner module - alias "u" if [[ ${PORTAL_MODULES:i-1:1} == "u" ]]; then - COMPOSE_FILES+=" -f docker-compose.mongodb.yml -f docker-compose.blocker.yml -f docker-compose.abuse.yml" + COMPOSE_FILES+=" -f docker-compose.mongodb.yml -f docker-compose.blocker.yml -f docker-compose.abuse-scanner.yml" fi done diff --git a/docker-compose.abuse.yml b/docker-compose.abuse-scanner.yml similarity index 79% rename from docker-compose.abuse.yml rename to docker-compose.abuse-scanner.yml index e3f32750..4edb6556 100644 --- a/docker-compose.abuse.yml +++ b/docker-compose.abuse-scanner.yml @@ -7,11 +7,11 @@ x-logging: &default-logging max-file: "3" services: - abuse: - build: - context: ./docker/abuse - dockerfile: Dockerfile - container_name: abuse + abuse-scanner: + # uncomment "build" and comment out "image" to build from sources + # build: https://github.com/SkynetLabs/abuse-scanner.git#main + image: skynetlabs/abuse-scanner + container_name: abuse-scanner restart: unless-stopped logging: *default-logging env_file: diff --git a/docker-compose.accounts.yml b/docker-compose.accounts.yml index e8159040..3c2c46a9 100644 --- a/docker-compose.accounts.yml +++ b/docker-compose.accounts.yml @@ -20,11 +20,9 @@ services: - ACCOUNTS_LIMIT_ACCESS=${ACCOUNTS_LIMIT_ACCESS:-authenticated} # default to authenticated access only accounts: - build: - context: ./docker/accounts - dockerfile: Dockerfile - args: - branch: main + # uncomment "build" and comment out "image" to build from sources + # build: https://github.com/SkynetLabs/skynet-accounts.git#main + image: skynetlabs/skynet-accounts container_name: accounts restart: unless-stopped logging: *default-logging @@ -66,8 +64,7 @@ services: env_file: - .env environment: - - NEXT_PUBLIC_SKYNET_PORTAL_API=${SKYNET_PORTAL_API} - - NEXT_PUBLIC_SKYNET_DASHBOARD_URL=${SKYNET_DASHBOARD_URL} + - NEXT_PUBLIC_PORTAL_DOMAIN=${PORTAL_DOMAIN} - NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=${STRIPE_PUBLISHABLE_KEY} volumes: - ./docker/data/dashboard/.next:/usr/app/.next @@ -78,3 +75,25 @@ services: - 3000 depends_on: - 
mongo + + dashboard-v2: + build: + context: ./packages/dashboard-v2 + dockerfile: Dockerfile + container_name: dashboard-v2 + restart: unless-stopped + logging: *default-logging + env_file: + - .env + environment: + - GATSBY_PORTAL_DOMAIN=${PORTAL_DOMAIN} + volumes: + - ./docker/data/dashboard-v2/.cache:/usr/app/.cache + - ./docker/data/dashboard-v2/public:/usr/app/public + networks: + shared: + ipv4_address: 10.10.10.86 + expose: + - 9000 + depends_on: + - mongo diff --git a/docker-compose.blocker.yml b/docker-compose.blocker.yml index 845bdc6a..edcb45c0 100644 --- a/docker-compose.blocker.yml +++ b/docker-compose.blocker.yml @@ -13,9 +13,9 @@ services: - BLOCKER_PORT=4000 blocker: - build: - context: ./docker/blocker - dockerfile: Dockerfile + # uncomment "build" and comment out "image" to build from sources + # build: https://github.com/SkynetLabs/blocker.git#main + image: skynetlabs/blocker container_name: blocker restart: unless-stopped logging: *default-logging diff --git a/docker-compose.jaeger.yml b/docker-compose.jaeger.yml index 297d466a..a7fb2483 100644 --- a/docker-compose.jaeger.yml +++ b/docker-compose.jaeger.yml @@ -10,7 +10,7 @@ services: sia: environment: - JAEGER_DISABLED=${JAEGER_DISABLED:-false} # Enable/Disable tracing - - JAEGER_SERVICE_NAME=${PORTAL_NAME:-Skyd} # change to e.g. eu-ger-1 + - JAEGER_SERVICE_NAME=${SERVER_DOMAIN:-Skyd} # change to e.g. eu-ger-1 # Configuration # See https://github.com/jaegertracing/jaeger-client-go#environment-variables # for all options. @@ -21,7 +21,7 @@ services: - JAEGER_REPORTER_LOG_SPANS=false jaeger-agent: - image: jaegertracing/jaeger-agent + image: jaegertracing/jaeger-agent:1.32.0 command: [ "--reporter.grpc.host-port=jaeger-collector:14250", @@ -43,7 +43,7 @@ services: - jaeger-collector jaeger-collector: - image: jaegertracing/jaeger-collector + image: jaegertracing/jaeger-collector:1.32.0 entrypoint: /wait_to_start.sh container_name: jaeger-collector restart: on-failure @@ -68,7 +68,7 @@ services: - elasticsearch jaeger-query: - image: jaegertracing/jaeger-query + image: jaegertracing/jaeger-query:1.32.0 entrypoint: /wait_to_start.sh container_name: jaeger-query restart: on-failure diff --git a/docker-compose.malware-scanner.yml b/docker-compose.malware-scanner.yml index 946e8c5c..fba60f98 100644 --- a/docker-compose.malware-scanner.yml +++ b/docker-compose.malware-scanner.yml @@ -26,19 +26,17 @@ services: ipv4_address: 10.10.10.100 malware-scanner: - build: - context: ./docker/malware-scanner - dockerfile: Dockerfile - args: - branch: main + # uncomment "build" and comment out "image" to build from sources + # build: https://github.com/SkynetLabs/malware-scanner.git#main + image: skynetlabs/malware-scanner container_name: malware-scanner restart: unless-stopped logging: *default-logging env_file: - .env environment: - - CLAMAV_IP=${CLAMAV_IP:-10.10.10.100} - - CLAMAV_PORT=${CLAMAV_PORT:-3310} + - CLAMAV_IP=10.10.10.100 + - CLAMAV_PORT=3310 - BLOCKER_IP=10.10.10.110 - BLOCKER_PORT=4000 expose: diff --git a/docker-compose.uploads.yml b/docker-compose.uploads.yml deleted file mode 100644 index c7d3043a..00000000 --- a/docker-compose.uploads.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: "3.7" - -services: - nginx: - build: - context: ./docker/nginx - dockerfile: Dockerfile.bionic - args: - RESTY_ADD_PACKAGE_BUILDDEPS: git - RESTY_EVAL_PRE_CONFIGURE: git clone https://github.com/fdintino/nginx-upload-module /tmp/nginx-upload-module - RESTY_CONFIG_OPTIONS_MORE: --add-module=/tmp/nginx-upload-module - RESTY_EVAL_POST_MAKE: 
/usr/local/openresty/luajit/bin/luarocks install luasocket diff --git a/docker-compose.yml b/docker-compose.yml index e63df946..5838f136 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -25,11 +25,10 @@ services: logging: *default-logging environment: - SIA_MODULES=gctwra - - SKYD_DISK_CACHE_ENABLED=${SKYD_DISK_CACHE_ENABLED:-false} + - SKYD_DISK_CACHE_ENABLED=${SKYD_DISK_CACHE_ENABLED:-true} - SKYD_DISK_CACHE_SIZE=${SKYD_DISK_CACHE_SIZE:-53690000000} # 50GB - SKYD_DISK_CACHE_MIN_HITS=${SKYD_DISK_CACHE_MIN_HITS:-3} - SKYD_DISK_CACHE_HIT_PERIOD=${SKYD_DISK_CACHE_HIT_PERIOD:-3600} # 1h - env_file: - .env volumes: @@ -40,21 +39,19 @@ services: expose: - 9980 - caddy: - build: - context: ./docker/caddy - dockerfile: Dockerfile - container_name: caddy + certbot: + image: certbot/dns-route53:v1.25.0 + entrypoint: sh /entrypoint.sh + container_name: certbot restart: unless-stopped logging: *default-logging env_file: - .env + environment: + - CERTBOT_ARGS=--dns-route53 volumes: - - ./docker/data/caddy/data:/data - - ./docker/data/caddy/config:/config - networks: - shared: - ipv4_address: 10.10.10.20 + - ./docker/certbot/entrypoint.sh:/entrypoint.sh + - ./docker/data/certbot:/etc/letsencrypt nginx: build: @@ -71,7 +68,7 @@ services: - ./docker/data/nginx/logs:/usr/local/openresty/nginx/logs - ./docker/data/nginx/skynet:/data/nginx/skynet:ro - ./docker/data/sia/apipassword:/data/sia/apipassword:ro - - ./docker/data/caddy/data:/data/caddy:ro + - ./docker/data/certbot:/etc/letsencrypt networks: shared: ipv4_address: 10.10.10.30 @@ -80,15 +77,16 @@ services: - "80:80" depends_on: - sia - - caddy - handshake-api - dnslink-api - website website: - build: - context: ./packages/website - dockerfile: Dockerfile + # uncomment "build" and comment out "image" to build from sources + # build: + # context: https://github.com/SkynetLabs/skynet-webportal.git#master + # dockerfile: ./packages/website/Dockerfile + image: skynetlabs/website container_name: website restart: unless-stopped logging: *default-logging @@ -101,9 +99,7 @@ services: - 9000 handshake: - build: - context: ./docker/handshake - dockerfile: Dockerfile + image: skynetlabs/hsd:3.0.1 command: --chain-migrate=2 --wallet-migrate=1 container_name: handshake restart: unless-stopped @@ -124,9 +120,11 @@ services: - 12037 handshake-api: - build: - context: ./packages/handshake-api - dockerfile: Dockerfile + # uncomment "build" and comment out "image" to build from sources + # build: + # context: https://github.com/SkynetLabs/skynet-webportal.git#master + # dockerfile: ./packages/handshake-api/Dockerfile + image: skynetlabs/handshake-api container_name: handshake-api restart: unless-stopped logging: *default-logging @@ -146,9 +144,11 @@ services: - handshake dnslink-api: - build: - context: ./packages/dnslink-api - dockerfile: Dockerfile + # uncomment "build" and comment out "image" to build from sources + # build: + # context: https://github.com/SkynetLabs/skynet-webportal.git#master + # dockerfile: ./packages/dnslink-api/Dockerfile + image: skynetlabs/dnslink-api container_name: dnslink-api restart: unless-stopped logging: *default-logging @@ -159,9 +159,11 @@ services: - 3100 health-check: - build: - context: ./packages/health-check - dockerfile: Dockerfile + # uncomment "build" and comment out "image" to build from sources + # build: + # context: https://github.com/SkynetLabs/skynet-webportal.git#master + # dockerfile: ./packages/health-check/Dockerfile + image: skynetlabs/health-check container_name: health-check restart: unless-stopped 
logging: *default-logging @@ -177,5 +179,3 @@ services: - STATE_DIR=/usr/app/state expose: - 3100 - depends_on: - - caddy diff --git a/docker/abuse/Dockerfile b/docker/abuse/Dockerfile deleted file mode 100644 index f27fb769..00000000 --- a/docker/abuse/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM golang:1.16.7 -LABEL maintainer="SkynetLabs " - -ENV GOOS linux -ENV GOARCH amd64 - -ARG branch=main - -WORKDIR /root - -RUN git clone --single-branch --branch ${branch} https://github.com/SkynetLabs/abuse-scanner.git && \ - cd abuse-scanner && \ - go mod download && \ - make release - -ENTRYPOINT ["abuse-scanner"] diff --git a/docker/accounts/Dockerfile b/docker/accounts/Dockerfile deleted file mode 100644 index 5cbf359a..00000000 --- a/docker/accounts/Dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -FROM golang:1.16.7 -LABEL maintainer="SkynetLabs " - -ENV GOOS linux -ENV GOARCH amd64 - -ARG branch=main - -WORKDIR /root - -RUN git clone --single-branch --branch ${branch} https://github.com/SkynetLabs/skynet-accounts.git && \ - cd skynet-accounts && \ - go mod download && \ - make release - -ENV SKYNET_DB_HOST="localhost" -ENV SKYNET_DB_PORT="27017" -ENV SKYNET_DB_USER="username" -ENV SKYNET_DB_PASS="password" -ENV SKYNET_ACCOUNTS_PORT=3000 - -ENTRYPOINT ["skynet-accounts"] diff --git a/docker/blocker/Dockerfile b/docker/blocker/Dockerfile deleted file mode 100644 index 70d572b9..00000000 --- a/docker/blocker/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM golang:1.16.7 -LABEL maintainer="SkynetLabs " - -ENV GOOS linux -ENV GOARCH amd64 - -ARG branch=main - -WORKDIR /root - -RUN git clone --single-branch --branch ${branch} https://github.com/SkynetLabs/blocker.git && \ - cd blocker && \ - go mod download && \ - make release - -ENTRYPOINT ["blocker"] diff --git a/docker/caddy/Dockerfile b/docker/caddy/Dockerfile deleted file mode 100644 index 31075803..00000000 --- a/docker/caddy/Dockerfile +++ /dev/null @@ -1,18 +0,0 @@ -FROM caddy:2.4.6-builder AS caddy-builder - -# available dns resolvers: https://github.com/caddy-dns -RUN xcaddy build --with github.com/caddy-dns/route53 - -FROM caddy:2.4.6-alpine - -COPY --from=caddy-builder /usr/bin/caddy /usr/bin/caddy - -# bash required for mo to work (mo is mustache templating engine - https://github.com/tests-always-included/mo) -RUN apk add --no-cache bash - -COPY caddy.json.template mo /etc/caddy/ - -CMD [ "sh", "-c", \ - "/etc/caddy/mo < /etc/caddy/caddy.json.template > /etc/caddy/caddy.json ; \ - caddy run --config /etc/caddy/caddy.json" \ - ] diff --git a/docker/caddy/caddy.json.template b/docker/caddy/caddy.json.template deleted file mode 100644 index 0dadbb9e..00000000 --- a/docker/caddy/caddy.json.template +++ /dev/null @@ -1,39 +0,0 @@ -{ - "apps": { - "tls": { - "certificates": { - "automate": [ - {{#PORTAL_DOMAIN}} - "{{PORTAL_DOMAIN}}", "*.{{PORTAL_DOMAIN}}", "*.hns.{{PORTAL_DOMAIN}}" - {{/PORTAL_DOMAIN}} - - {{#PORTAL_DOMAIN}}{{#SERVER_DOMAIN}},{{/SERVER_DOMAIN}}{{/PORTAL_DOMAIN}} - - {{#SERVER_DOMAIN}} - "{{SERVER_DOMAIN}}", "*.{{SERVER_DOMAIN}}", "*.hns.{{SERVER_DOMAIN}}" - {{/SERVER_DOMAIN}} - ] - }, - "automation": { - "policies": [ - { - "issuers": [ - { - "module": "acme", - "email": "{{EMAIL_ADDRESS}}", - "challenges": { - "dns": { - "provider": { - "name": "route53" - }, - "ttl": "30m" - } - } - } - ] - } - ] - } - } - } -} diff --git a/docker/caddy/mo b/docker/caddy/mo deleted file mode 100755 index ba8e48d1..00000000 --- a/docker/caddy/mo +++ /dev/null @@ -1,1106 +0,0 @@ -#!/usr/bin/env bash -# -#/ Mo is a mustache template rendering 
software written in bash. It inserts -#/ environment variables into templates. -#/ -#/ Simply put, mo will change {{VARIABLE}} into the value of that -#/ environment variable. You can use {{#VARIABLE}}content{{/VARIABLE}} to -#/ conditionally display content or iterate over the values of an array. -#/ -#/ Learn more about mustache templates at https://mustache.github.io/ -#/ -#/ Simple usage: -#/ -#/ mo [OPTIONS] filenames... -#/ -#/ Options: -#/ -#/ -u, --fail-not-set -#/ Fail upon expansion of an unset variable. -#/ -x, --fail-on-function -#/ Fail when a function returns a non-zero status code. -#/ -e, --false -#/ Treat the string "false" as empty for conditionals. -#/ -h, --help -#/ This message. -#/ -s=FILE, --source=FILE -#/ Load FILE into the environment before processing templates. -#/ Can be used multiple times. -# -# Mo is under a MIT style licence with an additional non-advertising clause. -# See LICENSE.md for the full text. -# -# This is open source! Please feel free to contribute. -# -# https://github.com/tests-always-included/mo - - -# Public: Template parser function. Writes templates to stdout. -# -# $0 - Name of the mo file, used for getting the help message. -# $@ - Filenames to parse. -# -# Options: -# -# --allow-function-arguments -# -# Permit functions in templates to be called with additional arguments. This -# puts template data directly in to the path of an eval statement. Use with -# caution. Not listed in the help because it only makes sense when mo is -# sourced. -# -# -u, --fail-not-set -# -# Fail upon expansion of an unset variable. Default behavior is to silently -# ignore and expand into empty string. -# -# -x, --fail-on-function -# -# Fail when a function used by a template returns an error status code. -# Alternately, ou may set the MO_FAIL_ON_FUNCTION environment variable to a -# non-empty value to enable this behavior. -# -# -e, --false -# -# Treat "false" as an empty value. You may set the MO_FALSE_IS_EMPTY -# environment variable instead to a non-empty value to enable this behavior. -# -# -h, --help -# -# Display a help message. -# -# -s=FILE, --source=FILE -# -# Source a file into the environment before processing template files. -# This can be used multiple times. -# -# -- -# -# Used to indicate the end of options. You may optionally use this when -# filenames may start with two hyphens. -# -# Mo uses the following environment variables: -# -# MO_ALLOW_FUNCTION_ARGUMENTS - When set to a non-empty value, this allows -# functions referenced in templates to receive additional -# options and arguments. This puts the content from the -# template directly into an eval statement. Use with extreme -# care. -# MO_FUNCTION_ARGS - Arguments passed to the function -# MO_FAIL_ON_FUNCTION - If a function returns a non-zero status code, abort -# with an error. -# MO_FAIL_ON_UNSET - When set to a non-empty value, expansion of an unset env -# variable will be aborted with an error. -# MO_FALSE_IS_EMPTY - When set to a non-empty value, the string "false" will be -# treated as an empty value for the purposes of conditionals. -# MO_ORIGINAL_COMMAND - Used to find the `mo` program in order to generate a -# help message. -# -# Returns nothing. -mo() ( - # This function executes in a subshell so IFS is reset. - # Namespace this variable so we don't conflict with desired values. 
- local moContent f2source files doubleHyphens - - IFS=$' \n\t' - files=() - doubleHyphens=false - - if [[ $# -gt 0 ]]; then - for arg in "$@"; do - if $doubleHyphens; then - #: After we encounter two hyphens together, all the rest - #: of the arguments are files. - files=("${files[@]}" "$arg") - else - case "$arg" in - -h|--h|--he|--hel|--help|-\?) - moUsage "$0" - exit 0 - ;; - - --allow-function-arguments) - # shellcheck disable=SC2030 - MO_ALLOW_FUNCTION_ARGUMENTS=true - ;; - - -u | --fail-not-set) - # shellcheck disable=SC2030 - MO_FAIL_ON_UNSET=true - ;; - - -x | --fail-on-function) - # shellcheck disable=SC2030 - MO_FAIL_ON_FUNCTION=true - ;; - - -e | --false) - # shellcheck disable=SC2030 - MO_FALSE_IS_EMPTY=true - ;; - - -s=* | --source=*) - if [[ "$arg" == --source=* ]]; then - f2source="${arg#--source=}" - else - f2source="${arg#-s=}" - fi - - if [[ -f "$f2source" ]]; then - # shellcheck disable=SC1090 - . "$f2source" - else - echo "No such file: $f2source" >&2 - exit 1 - fi - ;; - - --) - #: Set a flag indicating we've encountered double hyphens - doubleHyphens=true - ;; - - *) - #: Every arg that is not a flag or a option should be a file - files=(${files[@]+"${files[@]}"} "$arg") - ;; - esac - fi - done - fi - - moGetContent moContent "${files[@]}" || return 1 - moParse "$moContent" "" true -) - - -# Internal: Call a function. -# -# $1 - Variable for output -# $2 - Function to call -# $3 - Content to pass -# $4 - Additional arguments as a single string -# -# This can be dangerous, especially if you are using tags like -# {{someFunction ; rm -rf / }} -# -# Returns nothing. -moCallFunction() { - local moArgs moContent moFunctionArgs moFunctionResult - - moArgs=() - moTrimWhitespace moFunctionArgs "$4" - - # shellcheck disable=SC2031 - if [[ -n "${MO_ALLOW_FUNCTION_ARGUMENTS-}" ]]; then - # Intentionally bad behavior - # shellcheck disable=SC2206 - moArgs=($4) - fi - - moContent=$(echo -n "$3" | MO_FUNCTION_ARGS="$moFunctionArgs" eval "$2" "${moArgs[@]}") || { - moFunctionResult=$? - # shellcheck disable=SC2031 - if [[ -n "${MO_FAIL_ON_FUNCTION-}" && "$moFunctionResult" != 0 ]]; then - echo "Function '$2' with args (${moArgs[*]+"${moArgs[@]}"}) failed with status code $moFunctionResult" - exit "$moFunctionResult" - fi - } - - # shellcheck disable=SC2031 - local "$1" && moIndirect "$1" "$moContent" -} - - -# Internal: Scan content until the right end tag is found. Creates an array -# with the following members: -# -# [0] = Content before end tag -# [1] = End tag (complete tag) -# [2] = Content after end tag -# -# Everything using this function uses the "standalone tags" logic. -# -# $1 - Name of variable for the array -# $2 - Content -# $3 - Name of end tag -# $4 - If -z, do standalone tag processing before finishing -# -# Returns nothing. 
-moFindEndTag() { - local content remaining scanned standaloneBytes tag - - #: Find open tags - scanned="" - moSplit content "$2" '{{' '}}' - - while [[ "${#content[@]}" -gt 1 ]]; do - moTrimWhitespace tag "${content[1]}" - - #: Restore content[1] before we start using it - content[1]='{{'"${content[1]}"'}}' - - case $tag in - '#'* | '^'*) - #: Start another block - scanned="${scanned}${content[0]}${content[1]}" - moTrimWhitespace tag "${tag:1}" - moFindEndTag content "${content[2]}" "$tag" "loop" - scanned="${scanned}${content[0]}${content[1]}" - remaining=${content[2]} - ;; - - '/'*) - #: End a block - could be ours - moTrimWhitespace tag "${tag:1}" - scanned="$scanned${content[0]}" - - if [[ "$tag" == "$3" ]]; then - #: Found our end tag - if [[ -z "${4-}" ]] && moIsStandalone standaloneBytes "$scanned" "${content[2]}" true; then - #: This is also a standalone tag - clean up whitespace - #: and move those whitespace bytes to the "tag" element - # shellcheck disable=SC2206 - standaloneBytes=( $standaloneBytes ) - content[1]="${scanned:${standaloneBytes[0]}}${content[1]}${content[2]:0:${standaloneBytes[1]}}" - scanned="${scanned:0:${standaloneBytes[0]}}" - content[2]="${content[2]:${standaloneBytes[1]}}" - fi - - local "$1" && moIndirectArray "$1" "$scanned" "${content[1]}" "${content[2]}" - return 0 - fi - - scanned="$scanned${content[1]}" - remaining=${content[2]} - ;; - - *) - #: Ignore all other tags - scanned="${scanned}${content[0]}${content[1]}" - remaining=${content[2]} - ;; - esac - - moSplit content "$remaining" '{{' '}}' - done - - #: Did not find our closing tag - scanned="$scanned${content[0]}" - local "$1" && moIndirectArray "$1" "${scanned}" "" "" -} - - -# Internal: Find the first index of a substring. If not found, sets the -# index to -1. -# -# $1 - Destination variable for the index -# $2 - Haystack -# $3 - Needle -# -# Returns nothing. -moFindString() { - local pos string - - string=${2%%$3*} - [[ "$string" == "$2" ]] && pos=-1 || pos=${#string} - local "$1" && moIndirect "$1" "$pos" -} - - -# Internal: Generate a dotted name based on current context and target name. -# -# $1 - Target variable to store results -# $2 - Context name -# $3 - Desired variable name -# -# Returns nothing. -moFullTagName() { - if [[ -z "${2-}" ]] || [[ "$2" == *.* ]]; then - local "$1" && moIndirect "$1" "$3" - else - local "$1" && moIndirect "$1" "${2}.${3}" - fi -} - - -# Internal: Fetches the content to parse into a variable. Can be a list of -# partials for files or the content from stdin. -# -# $1 - Variable name to assign this content back as -# $2-@ - File names (optional) -# -# Returns nothing. -moGetContent() { - local moContent moFilename moTarget - - moTarget=$1 - shift - if [[ "${#@}" -gt 0 ]]; then - moContent="" - - for moFilename in "$@"; do - #: This is so relative paths work from inside template files - moContent="$moContent"'{{>'"$moFilename"'}}' - done - else - moLoadFile moContent || return 1 - fi - - local "$moTarget" && moIndirect "$moTarget" "$moContent" -} - - -# Internal: Indent a string, placing the indent at the beginning of every -# line that has any content. -# -# $1 - Name of destination variable to get an array of lines -# $2 - The indent string -# $3 - The string to reindent -# -# Returns nothing. -moIndentLines() { - local content fragment len posN posR result trimmed - - result="" - - #: Remove the period from the end of the string. 
- len=$((${#3} - 1)) - content=${3:0:$len} - - if [[ -z "${2-}" ]]; then - local "$1" && moIndirect "$1" "$content" - - return 0 - fi - - moFindString posN "$content" $'\n' - moFindString posR "$content" $'\r' - - while [[ "$posN" -gt -1 ]] || [[ "$posR" -gt -1 ]]; do - if [[ "$posN" -gt -1 ]]; then - fragment="${content:0:$posN + 1}" - content=${content:$posN + 1} - else - fragment="${content:0:$posR + 1}" - content=${content:$posR + 1} - fi - - moTrimChars trimmed "$fragment" false true " " $'\t' $'\n' $'\r' - - if [[ -n "$trimmed" ]]; then - fragment="$2$fragment" - fi - - result="$result$fragment" - - moFindString posN "$content" $'\n' - moFindString posR "$content" $'\r' - - # If the content ends in a newline, do not indent. - if [[ "$posN" -eq ${#content} ]]; then - # Special clause for \r\n - if [[ "$posR" -eq "$((posN - 1))" ]]; then - posR=-1 - fi - - posN=-1 - fi - - if [[ "$posR" -eq ${#content} ]]; then - posR=-1 - fi - done - - moTrimChars trimmed "$content" false true " " $'\t' - - if [[ -n "$trimmed" ]]; then - content="$2$content" - fi - - result="$result$content" - - local "$1" && moIndirect "$1" "$result" -} - - -# Internal: Send a variable up to the parent of the caller of this function. -# -# $1 - Variable name -# $2 - Value -# -# Examples -# -# callFunc () { -# local "$1" && moIndirect "$1" "the value" -# } -# callFunc dest -# echo "$dest" # writes "the value" -# -# Returns nothing. -moIndirect() { - unset -v "$1" - printf -v "$1" '%s' "$2" -} - - -# Internal: Send an array as a variable up to caller of a function -# -# $1 - Variable name -# $2-@ - Array elements -# -# Examples -# -# callFunc () { -# local myArray=(one two three) -# local "$1" && moIndirectArray "$1" "${myArray[@]}" -# } -# callFunc dest -# echo "${dest[@]}" # writes "one two three" -# -# Returns nothing. -moIndirectArray() { - unset -v "$1" - - # IFS must be set to a string containing space or unset in order for - # the array slicing to work regardless of the current IFS setting on - # bash 3. This is detailed further at - # https://github.com/fidian/gg-core/pull/7 - eval "$(printf "IFS= %s=(\"\${@:2}\") IFS=%q" "$1" "$IFS")" -} - - -# Internal: Determine if a given environment variable exists and if it is -# an array. -# -# $1 - Name of environment variable -# -# Be extremely careful. Even if strict mode is enabled, it is not honored -# in newer versions of Bash. Any errors that crop up here will not be -# caught automatically. -# -# Examples -# -# var=(abc) -# if moIsArray var; then -# echo "This is an array" -# echo "Make sure you don't accidentally use \$var" -# fi -# -# Returns 0 if the name is not empty, 1 otherwise. -moIsArray() { - # Namespace this variable so we don't conflict with what we're testing. - local moTestResult - - moTestResult=$(declare -p "$1" 2>/dev/null) || return 1 - [[ "${moTestResult:0:10}" == "declare -a" ]] && return 0 - [[ "${moTestResult:0:10}" == "declare -A" ]] && return 0 - - return 1 -} - - -# Internal: Determine if the given name is a defined function. -# -# $1 - Function name to check -# -# Be extremely careful. Even if strict mode is enabled, it is not honored -# in newer versions of Bash. Any errors that crop up here will not be -# caught automatically. -# -# Examples -# -# moo () { -# echo "This is a function" -# } -# if moIsFunction moo; then -# echo "moo is a defined function" -# fi -# -# Returns 0 if the name is a function, 1 otherwise. 
-moIsFunction() { - local functionList functionName - - functionList=$(declare -F) - # shellcheck disable=SC2206 - functionList=( ${functionList//declare -f /} ) - - for functionName in "${functionList[@]}"; do - if [[ "$functionName" == "$1" ]]; then - return 0 - fi - done - - return 1 -} - - -# Internal: Determine if the tag is a standalone tag based on whitespace -# before and after the tag. -# -# Passes back a string containing two numbers in the format "BEFORE AFTER" -# like "27 10". It indicates the number of bytes remaining in the "before" -# string (27) and the number of bytes to trim in the "after" string (10). -# Useful for string manipulation: -# -# $1 - Variable to set for passing data back -# $2 - Content before the tag -# $3 - Content after the tag -# $4 - true/false: is this the beginning of the content? -# -# Examples -# -# moIsStandalone RESULT "$before" "$after" false || return 0 -# RESULT_ARRAY=( $RESULT ) -# echo "${before:0:${RESULT_ARRAY[0]}}...${after:${RESULT_ARRAY[1]}}" -# -# Returns nothing. -moIsStandalone() { - local afterTrimmed beforeTrimmed char - - moTrimChars beforeTrimmed "$2" false true " " $'\t' - moTrimChars afterTrimmed "$3" true false " " $'\t' - char=$((${#beforeTrimmed} - 1)) - char=${beforeTrimmed:$char} - - # If the content before didn't end in a newline - if [[ "$char" != $'\n' ]] && [[ "$char" != $'\r' ]]; then - # and there was content or this didn't start the file - if [[ -n "$char" ]] || ! $4; then - # then this is not a standalone tag. - return 1 - fi - fi - - char=${afterTrimmed:0:1} - - # If the content after doesn't start with a newline and it is something - if [[ "$char" != $'\n' ]] && [[ "$char" != $'\r' ]] && [[ -n "$char" ]]; then - # then this is not a standalone tag. - return 2 - fi - - if [[ "$char" == $'\r' ]] && [[ "${afterTrimmed:1:1}" == $'\n' ]]; then - char="$char"$'\n' - fi - - local "$1" && moIndirect "$1" "$((${#beforeTrimmed})) $((${#3} + ${#char} - ${#afterTrimmed}))" -} - - -# Internal: Join / implode an array -# -# $1 - Variable name to receive the joined content -# $2 - Joiner -# $3-$* - Elements to join -# -# Returns nothing. -moJoin() { - local joiner part result target - - target=$1 - joiner=$2 - result=$3 - shift 3 - - for part in "$@"; do - result="$result$joiner$part" - done - - local "$target" && moIndirect "$target" "$result" -} - - -# Internal: Read a file into a variable. -# -# $1 - Variable name to receive the file's content -# $2 - Filename to load - if empty, defaults to /dev/stdin -# -# Returns nothing. -moLoadFile() { - local content len - - # The subshell removes any trailing newlines. We forcibly add - # a dot to the content to preserve all newlines. - # As a future optimization, it would be worth considering removing - # cat and replacing this with a read loop. - - content=$(cat -- "${2:-/dev/stdin}" && echo '.') || return 1 - len=$((${#content} - 1)) - content=${content:0:$len} # Remove last dot - - local "$1" && moIndirect "$1" "$content" -} - - -# Internal: Process a chunk of content some number of times. Writes output -# to stdout. -# -# $1 - Content to parse repeatedly -# $2 - Tag prefix (context name) -# $3-@ - Names to insert into the parsed content -# -# Returns nothing. -moLoop() { - local content context contextBase - - content=$1 - contextBase=$2 - shift 2 - - while [[ "${#@}" -gt 0 ]]; do - moFullTagName context "$contextBase" "$1" - moParse "$content" "$context" false - shift - done -} - - -# Internal: Parse a block of text, writing the result to stdout. 
-# -# $1 - Block of text to change -# $2 - Current name (the variable NAME for what {{.}} means) -# $3 - true when no content before this, false otherwise -# -# Returns nothing. -moParse() { - # Keep naming variables mo* here to not overwrite needed variables - # used in the string replacements - local moArgs moBlock moContent moCurrent moIsBeginning moNextIsBeginning moTag - - moCurrent=$2 - moIsBeginning=$3 - - # Find open tags - moSplit moContent "$1" '{{' '}}' - - while [[ "${#moContent[@]}" -gt 1 ]]; do - moTrimWhitespace moTag "${moContent[1]}" - moNextIsBeginning=false - - case $moTag in - '#'*) - # Loop, if/then, or pass content through function - # Sets context - moStandaloneAllowed moContent "${moContent[@]}" "$moIsBeginning" - moTrimWhitespace moTag "${moTag:1}" - - # Split arguments from the tag name. Arguments are passed to - # functions. - moArgs=$moTag - moTag=${moTag%% *} - moTag=${moTag%%$'\t'*} - moArgs=${moArgs:${#moTag}} - moFindEndTag moBlock "$moContent" "$moTag" - moFullTagName moTag "$moCurrent" "$moTag" - - if moTest "$moTag"; then - # Show / loop / pass through function - if moIsFunction "$moTag"; then - moCallFunction moContent "$moTag" "${moBlock[0]}" "$moArgs" - moParse "$moContent" "$moCurrent" false - moContent="${moBlock[2]}" - elif moIsArray "$moTag"; then - eval "moLoop \"\${moBlock[0]}\" \"$moTag\" \"\${!${moTag}[@]}\"" - else - moParse "${moBlock[0]}" "$moCurrent" true - fi - fi - - moContent="${moBlock[2]}" - ;; - - '>'*) - # Load partial - get name of file relative to cwd - moPartial moContent "${moContent[@]}" "$moIsBeginning" "$moCurrent" - moNextIsBeginning=${moContent[1]} - moContent=${moContent[0]} - ;; - - '/'*) - # Closing tag - If hit in this loop, we simply ignore - # Matching tags are found in moFindEndTag - moStandaloneAllowed moContent "${moContent[@]}" "$moIsBeginning" - ;; - - '^'*) - # Display section if named thing does not exist - moStandaloneAllowed moContent "${moContent[@]}" "$moIsBeginning" - moTrimWhitespace moTag "${moTag:1}" - moFindEndTag moBlock "$moContent" "$moTag" - moFullTagName moTag "$moCurrent" "$moTag" - - if ! moTest "$moTag"; then - moParse "${moBlock[0]}" "$moCurrent" false "$moCurrent" - fi - - moContent="${moBlock[2]}" - ;; - - '!'*) - # Comment - ignore the tag content entirely - # Trim spaces/tabs before the comment - moStandaloneAllowed moContent "${moContent[@]}" "$moIsBeginning" - ;; - - .) - # Current content (environment variable or function) - moStandaloneDenied moContent "${moContent[@]}" - moShow "$moCurrent" "$moCurrent" - ;; - - '=') - # Change delimiters - # Any two non-whitespace sequences separated by whitespace. - # This tag is ignored. - moStandaloneAllowed moContent "${moContent[@]}" "$moIsBeginning" - ;; - - '{'*) - # Unescaped - split on }}} not }} - moStandaloneDenied moContent "${moContent[@]}" - moContent="${moTag:1}"'}}'"$moContent" - moSplit moContent "$moContent" '}}}' - moTrimWhitespace moTag "${moContent[0]}" - moArgs=$moTag - moTag=${moTag%% *} - moTag=${moTag%%$'\t'*} - moArgs=${moArgs:${#moTag}} - moFullTagName moTag "$moCurrent" "$moTag" - moContent=${moContent[1]} - - # Now show the value - # Quote moArgs here, do not quote it later. 
- moShow "$moTag" "$moCurrent" "$moArgs" - ;; - - '&'*) - # Unescaped - moStandaloneDenied moContent "${moContent[@]}" - moTrimWhitespace moTag "${moTag:1}" - moFullTagName moTag "$moCurrent" "$moTag" - moShow "$moTag" "$moCurrent" - ;; - - *) - # Normal environment variable or function call - moStandaloneDenied moContent "${moContent[@]}" - moArgs=$moTag - moTag=${moTag%% *} - moTag=${moTag%%$'\t'*} - moArgs=${moArgs:${#moTag}} - moFullTagName moTag "$moCurrent" "$moTag" - - # Quote moArgs here, do not quote it later. - moShow "$moTag" "$moCurrent" "$moArgs" - ;; - esac - - moIsBeginning=$moNextIsBeginning - moSplit moContent "$moContent" '{{' '}}' - done - - echo -n "${moContent[0]}" -} - - -# Internal: Process a partial. -# -# Indentation should be applied to the entire partial. -# -# This sends back the "is beginning" flag because the newline after a -# standalone partial is consumed. That newline is very important in the middle -# of content. We send back this flag to reset the processing loop's -# `moIsBeginning` variable, so the software thinks we are back at the -# beginning of a file and standalone processing continues to work. -# -# Prefix all variables. -# -# $1 - Name of destination variable. Element [0] is the content, [1] is the -# true/false flag indicating if we are at the beginning of content. -# $2 - Content before the tag that was not yet written -# $3 - Tag content -# $4 - Content after the tag -# $5 - true/false: is this the beginning of the content? -# $6 - Current context name -# -# Returns nothing. -moPartial() { - # Namespace variables here to prevent conflicts. - local moContent moFilename moIndent moIsBeginning moPartial moStandalone moUnindented - - if moIsStandalone moStandalone "$2" "$4" "$5"; then - # shellcheck disable=SC2206 - moStandalone=( $moStandalone ) - echo -n "${2:0:${moStandalone[0]}}" - moIndent=${2:${moStandalone[0]}} - moContent=${4:${moStandalone[1]}} - moIsBeginning=true - else - moIndent="" - echo -n "$2" - moContent=$4 - moIsBeginning=$5 - fi - - moTrimWhitespace moFilename "${3:1}" - - # Execute in subshell to preserve current cwd and environment - ( - # It would be nice to remove `dirname` and use a function instead, - # but that's difficult when you're only given filenames. - cd "$(dirname -- "$moFilename")" || exit 1 - moUnindented="$( - moLoadFile moPartial "${moFilename##*/}" || exit 1 - moParse "${moPartial}" "$6" true - - # Fix bash handling of subshells and keep trailing whitespace. - # This is removed in moIndentLines. - echo -n "." - )" || exit 1 - moIndentLines moPartial "$moIndent" "$moUnindented" - echo -n "$moPartial" - ) || exit 1 - - # If this is a standalone tag, the trailing newline after the tag is - # removed and the contents of the partial are added, which typically - # contain a newline. We need to send a signal back to the processing - # loop that the moIsBeginning flag needs to be turned on again. - # - # [0] is the content, [1] is that flag. - local "$1" && moIndirectArray "$1" "$moContent" "$moIsBeginning" -} - - -# Internal: Show an environment variable or the output of a function to -# stdout. -# -# Limit/prefix any variables used. -# -# $1 - Name of environment variable or function -# $2 - Current context -# $3 - Arguments string if $1 is a function -# -# Returns nothing. -moShow() { - # Namespace these variables - local moJoined moNameParts moContent - - if moIsFunction "$1"; then - moCallFunction moContent "$1" "" "$3" - moParse "$moContent" "$2" false - return 0 - fi - - moSplit moNameParts "$1" "." 
- - if [[ -z "${moNameParts[1]-}" ]]; then - if moIsArray "$1"; then - eval moJoin moJoined "," "\${$1[@]}" - echo -n "$moJoined" - else - # shellcheck disable=SC2031 - if moTestVarSet "$1"; then - echo -n "${!1}" - elif [[ -n "${MO_FAIL_ON_UNSET-}" ]]; then - echo "Env variable not set: $1" >&2 - exit 1 - fi - fi - else - # Further subindexes are disallowed - eval "echo -n \"\${${moNameParts[0]}[${moNameParts[1]%%.*}]}\"" - fi -} - - -# Internal: Split a larger string into an array. -# -# $1 - Destination variable -# $2 - String to split -# $3 - Starting delimiter -# $4 - Ending delimiter (optional) -# -# Returns nothing. -moSplit() { - local pos result - - result=( "$2" ) - moFindString pos "${result[0]}" "$3" - - if [[ "$pos" -ne -1 ]]; then - # The first delimiter was found - result[1]=${result[0]:$pos + ${#3}} - result[0]=${result[0]:0:$pos} - - if [[ -n "${4-}" ]]; then - moFindString pos "${result[1]}" "$4" - - if [[ "$pos" -ne -1 ]]; then - # The second delimiter was found - result[2]="${result[1]:$pos + ${#4}}" - result[1]="${result[1]:0:$pos}" - fi - fi - fi - - local "$1" && moIndirectArray "$1" "${result[@]}" -} - - -# Internal: Handle the content for a standalone tag. This means removing -# whitespace (not newlines) before a tag and whitespace and a newline after -# a tag. That is, assuming, that the line is otherwise empty. -# -# $1 - Name of destination "content" variable. -# $2 - Content before the tag that was not yet written -# $3 - Tag content (not used) -# $4 - Content after the tag -# $5 - true/false: is this the beginning of the content? -# -# Returns nothing. -moStandaloneAllowed() { - local bytes - - if moIsStandalone bytes "$2" "$4" "$5"; then - # shellcheck disable=SC2206 - bytes=( $bytes ) - echo -n "${2:0:${bytes[0]}}" - local "$1" && moIndirect "$1" "${4:${bytes[1]}}" - else - echo -n "$2" - local "$1" && moIndirect "$1" "$4" - fi -} - - -# Internal: Handle the content for a tag that is never "standalone". No -# adjustments are made for newlines and whitespace. -# -# $1 - Name of destination "content" variable. -# $2 - Content before the tag that was not yet written -# $3 - Tag content (not used) -# $4 - Content after the tag -# -# Returns nothing. -moStandaloneDenied() { - echo -n "$2" - local "$1" && moIndirect "$1" "$4" -} - - -# Internal: Determines if the named thing is a function or if it is a -# non-empty environment variable. When MO_FALSE_IS_EMPTY is set to a -# non-empty value, then "false" is also treated is an empty value. -# -# Do not use variables without prefixes here if possible as this needs to -# check if any name exists in the environment -# -# $1 - Name of environment variable or function -# $2 - Current value (our context) -# MO_FALSE_IS_EMPTY - When set to a non-empty value, this will say the -# string value "false" is empty. -# -# Returns 0 if the name is not empty, 1 otherwise. When MO_FALSE_IS_EMPTY -# is set, this returns 1 if the name is "false". -moTest() { - # Test for functions - moIsFunction "$1" && return 0 - - if moIsArray "$1"; then - # Arrays must have at least 1 element - eval "[[ \"\${#${1}[@]}\" -gt 0 ]]" && return 0 - else - # If MO_FALSE_IS_EMPTY is set, then return 1 if the value of - # the variable is "false". - # shellcheck disable=SC2031 - [[ -n "${MO_FALSE_IS_EMPTY-}" ]] && [[ "${!1-}" == "false" ]] && return 1 - - # Environment variables must not be empty - [[ -n "${!1-}" ]] && return 0 - fi - - return 1 -} - -# Internal: Determine if a variable is assigned, even if it is assigned an empty -# value. 
-# -# $1 - Variable name to check. -# -# Returns true (0) if the variable is set, 1 if the variable is unset. -moTestVarSet() { - [[ "${!1-a}" == "${!1-b}" ]] -} - - -# Internal: Trim the leading whitespace only. -# -# $1 - Name of destination variable -# $2 - The string -# $3 - true/false - trim front? -# $4 - true/false - trim end? -# $5-@ - Characters to trim -# -# Returns nothing. -moTrimChars() { - local back current front last target varName - - target=$1 - current=$2 - front=$3 - back=$4 - last="" - shift 4 # Remove target, string, trim front flag, trim end flag - - while [[ "$current" != "$last" ]]; do - last=$current - - for varName in "$@"; do - $front && current="${current/#$varName}" - $back && current="${current/%$varName}" - done - done - - local "$target" && moIndirect "$target" "$current" -} - - -# Internal: Trim leading and trailing whitespace from a string. -# -# $1 - Name of variable to store trimmed string -# $2 - The string -# -# Returns nothing. -moTrimWhitespace() { - local result - - moTrimChars result "$2" true true $'\r' $'\n' $'\t' " " - local "$1" && moIndirect "$1" "$result" -} - - -# Internal: Displays the usage for mo. Pulls this from the file that -# contained the `mo` function. Can only work when the right filename -# comes is the one argument, and that only happens when `mo` is called -# with `$0` set to this file. -# -# $1 - Filename that has the help message -# -# Returns nothing. -moUsage() { - grep '^#/' "${MO_ORIGINAL_COMMAND}" | cut -c 4- - echo "" - echo "MO_VERSION=$MO_VERSION" -} - - -# Save the original command's path for usage later -MO_ORIGINAL_COMMAND="$(cd "${BASH_SOURCE[0]%/*}" || exit 1; pwd)/${BASH_SOURCE[0]##*/}" -MO_VERSION="2.2.0" - -# If sourced, load all functions. -# If executed, perform the actions as expected. -if [[ "$0" == "${BASH_SOURCE[0]}" ]] || [[ -z "${BASH_SOURCE[0]}" ]]; then - mo "$@" -fi diff --git a/docker/certbot/entrypoint.sh b/docker/certbot/entrypoint.sh new file mode 100755 index 00000000..8d291604 --- /dev/null +++ b/docker/certbot/entrypoint.sh @@ -0,0 +1,55 @@ +#!/bin/bash + +# Portal domain requires 3 domain certificates: +# - exact portal domain, ie. example.com +# - wildcard subdomain on portal domain, ie. *.example.com +# used for skylinks served from portal subdomain +# - wildcard subdomain on hns portal domain subdomain, ie. *.hns.example.com +# used for resolving handshake domains +DOMAINS=${PORTAL_DOMAIN},*.${PORTAL_DOMAIN},*.hns.${PORTAL_DOMAIN} + +# Add server domain when it is not empty and different from portal domain
if [ ! -z "${SERVER_DOMAIN}" ] && [ "${PORTAL_DOMAIN}" != "${SERVER_DOMAIN}" ]; then + # In case the server domain is not covered by the portal domain's + # wildcard certificate, add the server domain name to the domains list. + # - server-001.example.com is covered by *.example.com + # - server-001.servers.example.com or server-001.example-servers.com + # are not covered by any already requested wildcard certificates + # + # The condition checks whether the server domain does not match the portal domain + # with exactly one level of subdomain (portal domain wildcard cert): + # (start) [anything but the dot] + [dot] + [portal domain] (end) + if ! printf "${SERVER_DOMAIN}" | grep -q -E "^[^\.]+\.${PORTAL_DOMAIN}$"; then + DOMAINS=${DOMAINS},${SERVER_DOMAIN} + fi + + # Server domain requires the same set of domain certificates as portal domain. + # Exact server domain case is handled above.
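+ # For illustration, using the example values from the comments above + # (PORTAL_DOMAIN=example.com, SERVER_DOMAIN=server-001.servers.example.com), + # the line below leaves DOMAINS as: example.com,*.example.com,*.hns.example.com, + # server-001.servers.example.com,*.server-001.servers.example.com,*.hns.server-001.servers.example.com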
+ DOMAINS=${DOMAINS},*.${SERVER_DOMAIN},*.hns.${SERVER_DOMAIN} +fi + +# The "wait" will prevent an exit from the script while background tasks are +# still active, so we add the trap below to prevent orphaning the background +# child process. The trap fires when Docker terminates the container. +trap exit TERM + +while :; do + # Execute certbot and generate or maintain certificates for the given domain string. + # --non-interactive: we are running this as automation so we cannot be prompted + # --agree-tos: required flag marking agreement with the Let's Encrypt ToS + # --cert-name: output directory name + # --email: required for generating certificates, used for communication with CA + # --domains: comma-separated list of domains (will generate one bundled SAN cert) + # Use CERTBOT_ARGS env variable to pass any additional arguments, ie --dns-route53 + certbot certonly \ + --non-interactive --agree-tos --cert-name skynet-portal \ + --email ${EMAIL_ADDRESS} --domains ${DOMAINS} ${CERTBOT_ARGS} + + # Run a background sleep process that counts down the given time + # Certbot docs advise running the maintenance process every 12 hours + sleep 12h & + + # Wait until the sleep process is finished (it runs in the background) + # Syntax explanation: ${!} expands to the PID of the last background process + wait ${!} +done diff --git a/docker/handshake/Dockerfile b/docker/handshake/Dockerfile deleted file mode 100644 index 7a1fd0eb..00000000 --- a/docker/handshake/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM node:16.13.2-alpine - -WORKDIR /opt/hsd - -RUN apk update && apk add bash unbound-dev gmp-dev g++ gcc make python2 git -RUN git clone https://github.com/handshake-org/hsd.git /opt/hsd && \ - cd /opt/hsd && git checkout v3.0.1 && cd - -RUN npm install --production - -ENV PATH="${PATH}:/opt/hsd/bin:/opt/hsd/node_modules/.bin" - -ENTRYPOINT ["hsd"] diff --git a/docker/malware-scanner/Dockerfile b/docker/malware-scanner/Dockerfile deleted file mode 100644 index 2466e48b..00000000 --- a/docker/malware-scanner/Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -FROM golang:1.17.3 -LABEL maintainer="SkynetLabs " - -ENV GOOS linux -ENV GOARCH amd64 - -ARG branch=main - -WORKDIR /root - -RUN git clone --single-branch --branch ${branch} https://github.com/SkynetLabs/malware-scanner.git && \ - cd malware-scanner && \ - go mod download && \ - make release - -ENV SKYNET_DB_HOST="localhost" -ENV SKYNET_DB_PORT="27017" -ENV SKYNET_DB_USER="username" -ENV SKYNET_DB_PASS="password" -ENV CLAMAV_IP=127.0.0.1 -ENV CLAMAV_PORT=3310 - -ENTRYPOINT ["malware-scanner"] diff --git a/docker/nginx/Dockerfile b/docker/nginx/Dockerfile index 43c4b7f9..eca1e3d8 100644 --- a/docker/nginx/Dockerfile +++ b/docker/nginx/Dockerfile @@ -1,4 +1,6 @@ -FROM openresty/openresty:1.19.9.1-bionic +FROM openresty/openresty:1.19.9.1-focal + +WORKDIR / RUN luarocks install lua-resty-http && \ luarocks install hasher && \ @@ -16,7 +18,9 @@ COPY nginx.conf /usr/local/openresty/nginx/conf/nginx.conf CMD [ "bash", "-c", \ "./mo < /etc/nginx/conf.d.templates/server.account.conf > /etc/nginx/conf.d/server.account.conf ; \ ./mo < /etc/nginx/conf.d.templates/server.api.conf > /etc/nginx/conf.d/server.api.conf; \ + ./mo < /etc/nginx/conf.d.templates/server.dnslink.conf > /etc/nginx/conf.d/server.dnslink.conf; \ ./mo < /etc/nginx/conf.d.templates/server.hns.conf > /etc/nginx/conf.d/server.hns.conf; \ ./mo < /etc/nginx/conf.d.templates/server.skylink.conf > /etc/nginx/conf.d/server.skylink.conf ; \ + while :; do sleep 6h & wait ${!};
/usr/local/openresty/bin/openresty -s reload; done & \ /usr/local/openresty/bin/openresty '-g daemon off;'" \ ] diff --git a/docker/nginx/conf.d.templates/server.account.conf b/docker/nginx/conf.d.templates/server.account.conf index 0de88c03..af3b7c4d 100644 --- a/docker/nginx/conf.d.templates/server.account.conf +++ b/docker/nginx/conf.d.templates/server.account.conf @@ -9,8 +9,14 @@ server { server_name account.{{PORTAL_DOMAIN}}; # example: account.siasky.net - ssl_certificate /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.{{PORTAL_DOMAIN}}/wildcard_.{{PORTAL_DOMAIN}}.crt; - ssl_certificate_key /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.{{PORTAL_DOMAIN}}/wildcard_.{{PORTAL_DOMAIN}}.key; + set_by_lua_block $skynet_portal_domain { return "{{PORTAL_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { + -- fall back to portal domain if server domain is not defined + if "{{SERVER_DOMAIN}}" == "" then + return "{{PORTAL_DOMAIN}}" + end + return "{{SERVER_DOMAIN}}" + } include /etc/nginx/conf.d/server/server.account; } @@ -28,8 +34,8 @@ server { server_name account.{{SERVER_DOMAIN}}; # example: account.eu-ger-1.siasky.net - ssl_certificate /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.{{SERVER_DOMAIN}}/wildcard_.{{SERVER_DOMAIN}}.crt; - ssl_certificate_key /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.{{SERVER_DOMAIN}}/wildcard_.{{SERVER_DOMAIN}}.key; + set_by_lua_block $skynet_portal_domain { return "{{SERVER_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { return "{{SERVER_DOMAIN}}" } include /etc/nginx/conf.d/server/server.account; diff --git a/docker/nginx/conf.d.templates/server.api.conf b/docker/nginx/conf.d.templates/server.api.conf index 1adb58c1..591212ba 100644 --- a/docker/nginx/conf.d.templates/server.api.conf +++ b/docker/nginx/conf.d.templates/server.api.conf @@ -8,8 +8,14 @@ server { server { server_name {{PORTAL_DOMAIN}}; # example: siasky.net - ssl_certificate /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/{{PORTAL_DOMAIN}}/{{PORTAL_DOMAIN}}.crt; - ssl_certificate_key /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/{{PORTAL_DOMAIN}}/{{PORTAL_DOMAIN}}.key; + set_by_lua_block $skynet_portal_domain { return "{{PORTAL_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { + -- fall back to portal domain if server domain is not defined + if "{{SERVER_DOMAIN}}" == "" then + return "{{PORTAL_DOMAIN}}" + end + return "{{SERVER_DOMAIN}}" + } include /etc/nginx/conf.d/server/server.api; } @@ -27,8 +33,8 @@ server { server { server_name {{SERVER_DOMAIN}}; # example: eu-ger-1.siasky.net - ssl_certificate /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/{{SERVER_DOMAIN}}/{{SERVER_DOMAIN}}.crt; - ssl_certificate_key /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/{{SERVER_DOMAIN}}/{{SERVER_DOMAIN}}.key; + set_by_lua_block $skynet_portal_domain { return "{{SERVER_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { return "{{SERVER_DOMAIN}}" } include /etc/nginx/conf.d/server/server.api; diff --git a/docker/nginx/conf.d/server.dnslink.conf b/docker/nginx/conf.d.templates/server.dnslink.conf similarity index 51% rename from docker/nginx/conf.d/server.dnslink.conf rename to docker/nginx/conf.d.templates/server.dnslink.conf index 491bc389..d42ee245 100644 --- a/docker/nginx/conf.d/server.dnslink.conf +++ b/docker/nginx/conf.d.templates/server.dnslink.conf @@ -2,17 
+2,24 @@ lua_shared_dict dnslink 10m; server { listen 80 default_server; - listen [::]:80 default_server; include /etc/nginx/conf.d/server/server.dnslink; } server { listen 443 default_server; - listen [::]:443 default_server; ssl_certificate /etc/ssl/local-certificate.crt; ssl_certificate_key /etc/ssl/local-certificate.key; + set_by_lua_block $skynet_portal_domain { return "{{PORTAL_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { + -- fall back to portal domain if server domain is not defined + if "{{SERVER_DOMAIN}}" == "" then + return "{{PORTAL_DOMAIN}}" + end + return "{{SERVER_DOMAIN}}" + } + include /etc/nginx/conf.d/server/server.dnslink; } diff --git a/docker/nginx/conf.d.templates/server.hns.conf b/docker/nginx/conf.d.templates/server.hns.conf index a40427f1..0e4f21f3 100644 --- a/docker/nginx/conf.d.templates/server.hns.conf +++ b/docker/nginx/conf.d.templates/server.hns.conf @@ -8,9 +8,15 @@ server { server { server_name *.hns.{{PORTAL_DOMAIN}}; # example: *.hns.siasky.net - ssl_certificate /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.hns.{{PORTAL_DOMAIN}}/wildcard_.hns.{{PORTAL_DOMAIN}}.crt; - ssl_certificate_key /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.hns.{{PORTAL_DOMAIN}}/wildcard_.hns.{{PORTAL_DOMAIN}}.key; - + set_by_lua_block $skynet_portal_domain { return "{{PORTAL_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { + -- fall back to portal domain if server domain is not defined + if "{{SERVER_DOMAIN}}" == "" then + return "{{PORTAL_DOMAIN}}" + end + return "{{SERVER_DOMAIN}}" + } + proxy_set_header Host {{PORTAL_DOMAIN}}; include /etc/nginx/conf.d/server/server.hns; } @@ -28,8 +34,8 @@ server { server { server_name *.hns.{{SERVER_DOMAIN}}; # example: *.hns.eu-ger-1.siasky.net - ssl_certificate /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.hns.{{SERVER_DOMAIN}}/wildcard_.hns.{{SERVER_DOMAIN}}.crt; - ssl_certificate_key /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.hns.{{SERVER_DOMAIN}}/wildcard_.hns.{{SERVER_DOMAIN}}.key; + set_by_lua_block $skynet_portal_domain { return "{{SERVER_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { return "{{SERVER_DOMAIN}}" } proxy_set_header Host {{SERVER_DOMAIN}}; include /etc/nginx/conf.d/server/server.hns; diff --git a/docker/nginx/conf.d.templates/server.skylink.conf b/docker/nginx/conf.d.templates/server.skylink.conf index 0cfc2027..a97e240c 100644 --- a/docker/nginx/conf.d.templates/server.skylink.conf +++ b/docker/nginx/conf.d.templates/server.skylink.conf @@ -7,10 +7,16 @@ server { server { server_name *.{{PORTAL_DOMAIN}}; # example: *.siasky.net - - ssl_certificate /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.{{PORTAL_DOMAIN}}/wildcard_.{{PORTAL_DOMAIN}}.crt; - ssl_certificate_key /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.{{PORTAL_DOMAIN}}/wildcard_.{{PORTAL_DOMAIN}}.key; + set_by_lua_block $skynet_portal_domain { return "{{PORTAL_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { + -- fall back to portal domain if server domain is not defined + if "{{SERVER_DOMAIN}}" == "" then + return "{{PORTAL_DOMAIN}}" + end + return "{{SERVER_DOMAIN}}" + } + include /etc/nginx/conf.d/server/server.skylink; } {{/PORTAL_DOMAIN}} @@ -26,9 +32,9 @@ server { server { server_name *.{{SERVER_DOMAIN}}; # example: *.eu-ger-1.siasky.net - - ssl_certificate 
/data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.{{SERVER_DOMAIN}}/wildcard_.{{SERVER_DOMAIN}}.crt; - ssl_certificate_key /data/caddy/caddy/certificates/acme-v02.api.letsencrypt.org-directory/wildcard_.{{SERVER_DOMAIN}}/wildcard_.{{SERVER_DOMAIN}}.key; + + set_by_lua_block $skynet_portal_domain { return "{{SERVER_DOMAIN}}" } + set_by_lua_block $skynet_server_domain { return "{{SERVER_DOMAIN}}" } include /etc/nginx/conf.d/server/server.skylink; diff --git a/docker/nginx/conf.d/include/cors-headers b/docker/nginx/conf.d/include/cors-headers index 58369b65..f6a303cd 100644 --- a/docker/nginx/conf.d/include/cors-headers +++ b/docker/nginx/conf.d/include/cors-headers @@ -1,5 +1,5 @@ more_set_headers 'Access-Control-Allow-Origin: $http_origin'; more_set_headers 'Access-Control-Allow-Credentials: true'; more_set_headers 'Access-Control-Allow-Methods: GET, POST, HEAD, OPTIONS, PUT, PATCH, DELETE'; -more_set_headers 'Access-Control-Allow-Headers: DNT,User-Agent,X-Requested-With,If-Modified-Since,If-None-Match,Cache-Control,Content-Type,Range,X-HTTP-Method-Override,upload-offset,upload-metadata,upload-length,tus-version,tus-resumable,tus-extension,tus-max-size,upload-concat,location'; -more_set_headers 'Access-Control-Expose-Headers: Content-Length,Content-Range,ETag,Skynet-File-Metadata,Skynet-Skylink,Skynet-Proof,Skynet-Portal-Api,Skynet-Server-Api,upload-offset,upload-metadata,upload-length,tus-version,tus-resumable,tus-extension,tus-max-size,upload-concat,location'; +more_set_headers 'Access-Control-Allow-Headers: DNT,User-Agent,X-Requested-With,If-Modified-Since,If-None-Match,Cache-Control,Content-Type,Range,X-HTTP-Method-Override,upload-offset,upload-metadata,upload-length,tus-version,tus-resumable,tus-extension,tus-max-size,upload-concat,location,Skynet-API-Key'; +more_set_headers 'Access-Control-Expose-Headers: Content-Length,Content-Range,ETag,Accept-Ranges,Skynet-File-Metadata,Skynet-Skylink,Skynet-Proof,Skynet-Portal-Api,Skynet-Server-Api,upload-offset,upload-metadata,upload-length,tus-version,tus-resumable,tus-extension,tus-max-size,upload-concat,location'; diff --git a/docker/nginx/conf.d/include/init-optional-variables b/docker/nginx/conf.d/include/init-optional-variables index e072c6e6..406dfe98 100644 --- a/docker/nginx/conf.d/include/init-optional-variables +++ b/docker/nginx/conf.d/include/init-optional-variables @@ -13,6 +13,3 @@ set $skylink ""; # cached account limits (json string) - applies only if accounts are enabled set $account_limits ""; - -# set this internal flag to true if current request should not be limited in any way -set $internal_no_limits "false"; diff --git a/docker/nginx/conf.d/include/location-hns b/docker/nginx/conf.d/include/location-hns index 22e50317..0ddb62d7 100644 --- a/docker/nginx/conf.d/include/location-hns +++ b/docker/nginx/conf.d/include/location-hns @@ -1,4 +1,3 @@ -include /etc/nginx/conf.d/include/proxy-buffer; include /etc/nginx/conf.d/include/proxy-pass-internal; include /etc/nginx/conf.d/include/portal-access-check; @@ -81,8 +80,8 @@ proxy_pass https://127.0.0.1/$skylink$path$is_args$args; # in case siad returns location header, we need to replace the skylink with the domain name header_filter_by_lua_block { - ngx.header["Skynet-Portal-Api"] = os.getenv("SKYNET_PORTAL_API") - ngx.header["Skynet-Server-Api"] = os.getenv("SKYNET_SERVER_API") + ngx.header["Skynet-Portal-Api"] = ngx.var.scheme .. "://" .. ngx.var.skynet_portal_domain + ngx.header["Skynet-Server-Api"] = ngx.var.scheme .. "://" .. 
ngx.var.skynet_server_domain if ngx.header.location then -- match location redirect part after the skylink diff --git a/docker/nginx/conf.d/include/location-skylink b/docker/nginx/conf.d/include/location-skylink index b0d2066e..995a6e2d 100644 --- a/docker/nginx/conf.d/include/location-skylink +++ b/docker/nginx/conf.d/include/location-skylink @@ -1,6 +1,4 @@ include /etc/nginx/conf.d/include/cors; -include /etc/nginx/conf.d/include/proxy-buffer; -include /etc/nginx/conf.d/include/proxy-cache-downloads; include /etc/nginx/conf.d/include/track-download; limit_conn downloads_by_ip 100; # ddos protection: max 100 downloads at a time @@ -9,59 +7,10 @@ limit_conn downloads_by_ip 100; # ddos protection: max 100 downloads at a time # this is important because we want only one format in cache keys and logs set_by_lua_block $skylink { return require("skynet.skylink").parse(ngx.var.skylink) } -# $skylink_v1 and $skylink_v2 variables default to the same value but in case the requested skylink was: -# a) skylink v1 - it would not matter, no additional logic is executed -# b) skylink v2 - in a lua block below we will resolve the skylink v2 into skylink v1 and update -# $skylink_v1 variable so then the proxy request to skyd can be cached in nginx (proxy_cache_key -# in proxy-cache-downloads includes $skylink_v1 as a part of the cache key) -set $skylink_v1 $skylink; -set $skylink_v2 $skylink; - -# variable for Skynet-Proof header that we need to inject -# into a response if the request was for skylink v2 -set $skynet_proof ''; - # default download rate to unlimited set $limit_rate 0; access_by_lua_block { - local httpc = require("resty.http").new() - - -- detect whether requested skylink is v2 - local isBase32v2 = string.len(ngx.var.skylink) == 55 and string.sub(ngx.var.skylink, 0, 2) == "04" - local isBase64v2 = string.len(ngx.var.skylink) == 46 and string.sub(ngx.var.skylink, 0, 2) == "AQ" - - if isBase32v2 or isBase64v2 then - -- 10.10.10.10 points to sia service (alias not available when using resty-http) - local res, err = httpc:request_uri("http://10.10.10.10:9980/skynet/resolve/" .. 
ngx.var.skylink_v2, { - headers = { ["User-Agent"] = "Sia-Agent" } - }) - - -- print error and exit with 500 or exit with response if status is not 200 - if err or (res and res.status ~= ngx.HTTP_OK) then - ngx.status = (err and ngx.HTTP_INTERNAL_SERVER_ERROR) or res.status - ngx.header["content-type"] = "text/plain" - ngx.say(err or res.body) - return ngx.exit(ngx.status) - end - - local json = require('cjson') - local resolve = json.decode(res.body) - ngx.var.skylink_v1 = resolve.skylink - ngx.var.skynet_proof = res.headers["Skynet-Proof"] - end - - -- check if skylink v1 is present on blocklist (compare hashes) - if require("skynet.blocklist").is_blocked(ngx.var.skylink_v1) then - return require("skynet.blocklist").exit_illegal() - end - - -- if skylink is found on nocache list then set internal nocache variable - -- to tell nginx that it should not try and cache this file (too large) - if ngx.shared.nocache:get(ngx.var.skylink_v1) then - ngx.var.nocache = "1" - end - if require("skynet.account").accounts_enabled() then -- check if portal is in authenticated only mode if require("skynet.account").is_access_unauthorized() then @@ -81,33 +30,10 @@ access_by_lua_block { end } -header_filter_by_lua_block { - ngx.header["Skynet-Portal-Api"] = os.getenv("SKYNET_PORTAL_API") - ngx.header["Skynet-Server-Api"] = os.getenv("SKYNET_SERVER_API") - - -- not empty skynet_proof means this is a skylink v2 request - -- so we should replace the Skynet-Proof header with the one - -- we got from /skynet/resolve/ endpoint, otherwise we would - -- be serving cached empty v1 skylink Skynet-Proof header - if ngx.var.skynet_proof and ngx.var.skynet_proof ~= "" then - ngx.header["Skynet-Proof"] = ngx.var.skynet_proof - end - - -- add skylink to nocache list if it exceeds 1GB (1e+9 bytes) threshold - -- (content length can be nil for already cached files - we can ignore them) - if ngx.header["Content-Length"] and tonumber(ngx.header["Content-Length"]) > 1e+9 then - ngx.shared.nocache:set(ngx.var.skylink_v1, ngx.header["Content-Length"]) - end -} - limit_rate_after 512k; limit_rate $limit_rate; proxy_read_timeout 600; proxy_set_header User-Agent: Sia-Agent; -# in case the requested skylink was v2 and we already resolved it to skylink v1, we are going to pass resolved -# skylink v1 to skyd to save that extra skylink v2 lookup in skyd but in turn, in case skyd returns a redirect -# we need to rewrite the skylink v1 to skylink v2 in the location header with proxy_redirect -proxy_redirect $skylink_v1 $skylink_v2; -proxy_pass http://sia:9980/skynet/skylink/$skylink_v1$path$is_args$args; +proxy_pass http://sia:9980/skynet/skylink/$skylink$path$is_args$args; diff --git a/docker/nginx/conf.d/include/proxy-buffer b/docker/nginx/conf.d/include/proxy-buffer deleted file mode 100644 index 0fafbade..00000000 --- a/docker/nginx/conf.d/include/proxy-buffer +++ /dev/null @@ -1,5 +0,0 @@ -# if you are expecting large headers (ie. 
Skynet-Skyfile-Metadata), tune these values to your needs -# read more: https://www.getpagespeed.com/server-setup/nginx/tuning-proxy_buffer_size-in-nginx -proxy_buffer_size 4096k; -proxy_buffers 64 256k; -proxy_busy_buffers_size 4096k; # at least as high as proxy_buffer_size diff --git a/docker/nginx/conf.d/include/proxy-cache-downloads b/docker/nginx/conf.d/include/proxy-cache-downloads deleted file mode 100644 index 8481ebb9..00000000 --- a/docker/nginx/conf.d/include/proxy-cache-downloads +++ /dev/null @@ -1,14 +0,0 @@ -proxy_cache skynet; # cache name -proxy_cache_key $skylink_v1$path$arg_format$arg_attachment$arg_start$arg_end$http_range; # unique cache key -proxy_cache_min_uses 3; # cache after 3 uses -proxy_cache_valid 200 206 307 308 48h; # keep 200, 206, 307 and 308 responses valid for up to 2 days -add_header X-Proxy-Cache $upstream_cache_status; # add response header to indicate cache hits and misses - -# bypass - this will bypass cache hit on request (status BYPASS) -# but still stores file in cache if cache conditions are met -proxy_cache_bypass $cookie_nocache $arg_nocache; - -# no cache - this will ignore cache on request (status MISS) -# and does not store file in cache under no condition -set_if_empty $nocache "0"; -proxy_no_cache $nocache; diff --git a/docker/nginx/conf.d/include/ssl-settings b/docker/nginx/conf.d/include/ssl-settings index e4a82394..545d372b 100644 --- a/docker/nginx/conf.d/include/ssl-settings +++ b/docker/nginx/conf.d/include/ssl-settings @@ -1,7 +1,10 @@ # https://ssl-config.mozilla.org/#server=nginx&version=1.17.7&config=intermediate&openssl=1.1.1d&hsts=false&ocsp=false&guideline=5.6 +ssl_certificate /etc/letsencrypt/live/skynet-portal/fullchain.pem; +ssl_certificate_key /etc/letsencrypt/live/skynet-portal/privkey.pem; + ssl_session_timeout 1d; -ssl_session_cache shared:MozSSL:10m; # about 40000 sessions +ssl_session_cache shared:MozSSL:10m; # about 40000 sessions ssl_session_tickets off; # curl https://ssl-config.mozilla.org/ffdhe2048.txt > /path/to/dhparam @@ -11,3 +14,13 @@ ssl_dhparam /etc/nginx/conf.d/dhparam.pem; ssl_protocols TLSv1.2 TLSv1.3; ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384; ssl_prefer_server_ciphers off; + +# HSTS (ngx_http_headers_module is required) (63072000 seconds) +add_header Strict-Transport-Security "max-age=63072000" always; + +# OCSP stapling +ssl_stapling on; +ssl_stapling_verify on; + +# verify chain of trust of OCSP response using Root CA and Intermediate certs +ssl_trusted_certificate /etc/letsencrypt/live/skynet-portal/chain.pem; diff --git a/docker/nginx/conf.d/include/track-download b/docker/nginx/conf.d/include/track-download index 606c98ad..4e12fd41 100644 --- a/docker/nginx/conf.d/include/track-download +++ b/docker/nginx/conf.d/include/track-download @@ -1,8 +1,9 @@ -# register the download in accounts service (cookies should contain jwt) log_by_lua_block { - -- this block runs only when accounts are enabled - if require("skynet.account").accounts_enabled() then - local function track(premature, skylink, status, body_bytes_sent, jwt) + local skynet_account = require("skynet.account") + + -- tracking runs only when request comes from authenticated user + if skynet_account.is_authenticated() then + local function track(premature, skylink, status, body_bytes_sent, auth_headers) if premature then return end local httpc = 
require("resty.http").new() @@ -11,16 +12,18 @@ log_by_lua_block { -- 10.10.10.70 points to accounts service (alias not available when using resty-http) local res, err = httpc:request_uri("http://10.10.10.70:3000/track/download/" .. skylink .. "?" .. query, { method = "POST", - headers = { ["Cookie"] = "skynet-jwt=" .. jwt }, + headers = auth_headers, }) if err or (res and res.status ~= ngx.HTTP_NO_CONTENT) then - ngx.log(ngx.ERR, "Failed accounts service request /track/download/" .. skylink .. ": ", err or ("[HTTP " .. res.status .. "] " .. res.body)) + local error_response = err or ("[HTTP " .. res.status .. "] " .. res.body) + ngx.log(ngx.ERR, "Failed accounts service request /track/download/" .. skylink .. ": ", error_response) end end - if ngx.header["Skynet-Skylink"] and ngx.var.skynet_jwt ~= "" and ngx.status >= ngx.HTTP_OK and ngx.status < ngx.HTTP_SPECIAL_RESPONSE then - local ok, err = ngx.timer.at(0, track, ngx.header["Skynet-Skylink"], ngx.status, ngx.var.body_bytes_sent, ngx.var.skynet_jwt) + if ngx.header["Skynet-Skylink"] and ngx.status >= ngx.HTTP_OK and ngx.status < ngx.HTTP_SPECIAL_RESPONSE then + local auth_headers = skynet_account.get_auth_headers() + local ok, err = ngx.timer.at(0, track, ngx.header["Skynet-Skylink"], ngx.status, ngx.var.body_bytes_sent, auth_headers) if err then ngx.log(ngx.ERR, "Failed to create timer: ", err) end end end @@ -38,7 +41,8 @@ log_by_lua_block { }) if err or (res and res.status ~= ngx.HTTP_OK) then - ngx.log(ngx.ERR, "Failed malware-scanner request /scan/" .. skylink .. ": ", err or ("[HTTP " .. res.status .. "] " .. res.body)) + local error_response = err or ("[HTTP " .. res.status .. "] " .. res.body) + ngx.log(ngx.ERR, "Failed malware-scanner request /scan/" .. skylink .. ": ", error_response) end end diff --git a/docker/nginx/conf.d/include/track-registry b/docker/nginx/conf.d/include/track-registry index 8c69172b..2c840491 100644 --- a/docker/nginx/conf.d/include/track-registry +++ b/docker/nginx/conf.d/include/track-registry @@ -1,29 +1,32 @@ -# register the registry access in accounts service (cookies should contain jwt) log_by_lua_block { - -- this block runs only when accounts are enabled - if require("skynet.account").accounts_enabled() then - local function track(premature, request_method, jwt) + local skynet_account = require("skynet.account") + + -- tracking runs only when request comes from authenticated user + if skynet_account.is_authenticated() then + local function track(premature, request_method, auth_headers) if premature then return end local httpc = require("resty.http").new() - -- based on request method we assign a registry action string used + -- based on request method we assign a registry action string used -- in track endpoint namely "read" for GET and "write" for POST local registry_action = request_method == "GET" and "read" or "write" -- 10.10.10.70 points to accounts service (alias not available when using resty-http) local res, err = httpc:request_uri("http://10.10.10.70:3000/track/registry/" .. registry_action, { method = "POST", - headers = { ["Cookie"] = "skynet-jwt=" .. jwt }, + headers = auth_headers, }) - + if err or (res and res.status ~= ngx.HTTP_NO_CONTENT) then - ngx.log(ngx.ERR, "Failed accounts service request /track/registry/" .. registry_action .. ": ", err or ("[HTTP " .. res.status .. "] " .. res.body)) + local error_response = err or ("[HTTP " .. res.status .. "] " .. res.body) + ngx.log(ngx.ERR, "Failed accounts service request /track/registry/" .. registry_action .. 
": ", error_response) end end - if ngx.var.skynet_jwt ~= "" and (ngx.status == ngx.HTTP_OK or ngx.status == ngx.HTTP_NOT_FOUND) then - local ok, err = ngx.timer.at(0, track, ngx.req.get_method(), ngx.var.skynet_jwt) + if ngx.status == ngx.HTTP_OK or ngx.status == ngx.HTTP_NOT_FOUND then + local auth_headers = skynet_account.get_auth_headers() + local ok, err = ngx.timer.at(0, track, ngx.req.get_method(), auth_headers) if err then ngx.log(ngx.ERR, "Failed to create timer: ", err) end end end diff --git a/docker/nginx/conf.d/include/track-upload b/docker/nginx/conf.d/include/track-upload index 340dd437..36b12b9e 100644 --- a/docker/nginx/conf.d/include/track-upload +++ b/docker/nginx/conf.d/include/track-upload @@ -1,8 +1,9 @@ -# register the upload in accounts service (cookies should contain jwt) log_by_lua_block { - -- this block runs only when accounts are enabled - if require("skynet.account").accounts_enabled() then - local function track(premature, skylink, jwt) + local skynet_account = require("skynet.account") + + -- tracking runs only when request comes from authenticated user + if skynet_account.is_authenticated() then + local function track(premature, skylink, auth_headers) if premature then return end local httpc = require("resty.http").new() @@ -10,17 +11,19 @@ log_by_lua_block { -- 10.10.10.70 points to accounts service (alias not available when using resty-http) local res, err = httpc:request_uri("http://10.10.10.70:3000/track/upload/" .. skylink, { method = "POST", - headers = { ["Cookie"] = "skynet-jwt=" .. jwt }, + headers = auth_headers, }) if err or (res and res.status ~= ngx.HTTP_NO_CONTENT) then - ngx.log(ngx.ERR, "Failed accounts service request /track/upload/" .. skylink .. ": ", err or ("[HTTP " .. res.status .. "] " .. res.body)) + local error_response = err or ("[HTTP " .. res.status .. "] " .. res.body) + ngx.log(ngx.ERR, "Failed accounts service request /track/upload/" .. skylink .. ": ", error_response) end end -- report all skylinks (header empty if request failed) but only if jwt is preset (user is authenticated) - if ngx.header["Skynet-Skylink"] and ngx.var.skynet_jwt ~= "" then - local ok, err = ngx.timer.at(0, track, ngx.header["Skynet-Skylink"], ngx.var.skynet_jwt) + if ngx.header["Skynet-Skylink"] then + local auth_headers = skynet_account.get_auth_headers() + local ok, err = ngx.timer.at(0, track, ngx.header["Skynet-Skylink"], auth_headers) if err then ngx.log(ngx.ERR, "Failed to create timer: ", err) end end end @@ -38,7 +41,8 @@ log_by_lua_block { }) if err or (res and res.status ~= ngx.HTTP_OK) then - ngx.log(ngx.ERR, "Failed malware-scanner request /scan/" .. skylink .. ": ", err or ("[HTTP " .. res.status .. "] " .. res.body)) + local error_response = err or ("[HTTP " .. res.status .. "] " .. res.body) + ngx.log(ngx.ERR, "Failed malware-scanner request /scan/" .. skylink .. 
": ", error_response) end end diff --git a/docker/nginx/conf.d/server.local.conf b/docker/nginx/conf.d/server.local.conf deleted file mode 100644 index 6c5af504..00000000 --- a/docker/nginx/conf.d/server.local.conf +++ /dev/null @@ -1,10 +0,0 @@ -server { - # local server - do not expose this port externally - listen 8000; - listen [::]:8000; - - # secure traffic by limiting to only local networks - include /etc/nginx/conf.d/include/local-network-only; - - include /etc/nginx/conf.d/server/server.local; -} diff --git a/docker/nginx/conf.d/server/server.account b/docker/nginx/conf.d/server/server.account index 2fb5551d..127ba4bf 100644 --- a/docker/nginx/conf.d/server/server.account +++ b/docker/nginx/conf.d/server/server.account @@ -1,5 +1,4 @@ listen 443 ssl http2; -listen [::]:443 ssl http2; include /etc/nginx/conf.d/include/ssl-settings; include /etc/nginx/conf.d/include/init-optional-variables; @@ -36,9 +35,23 @@ location /api/register { proxy_pass http://accounts:3000; } +location /api/user/pubkey/register { + include /etc/nginx/conf.d/include/cors; + + rewrite /api/(.*) /$1 break; + proxy_pass http://accounts:3000; +} + location /api/login { include /etc/nginx/conf.d/include/cors; rewrite /api/(.*) /$1 break; proxy_pass http://accounts:3000; } + +location /api/logout { + include /etc/nginx/conf.d/include/cors; + + rewrite /api/(.*) /$1 break; + proxy_pass http://accounts:3000; +} diff --git a/docker/nginx/conf.d/server/server.api b/docker/nginx/conf.d/server/server.api index bcdd3705..5bd1aa61 100644 --- a/docker/nginx/conf.d/server/server.api +++ b/docker/nginx/conf.d/server/server.api @@ -1,5 +1,4 @@ listen 443 ssl http2; -listen [::]:443 ssl http2; include /etc/nginx/conf.d/include/ssl-settings; include /etc/nginx/conf.d/include/init-optional-variables; @@ -22,40 +21,42 @@ client_max_body_size 128k; rewrite ^/portals /skynet/portals permanent; rewrite ^/stats /skynet/stats permanent; rewrite ^/skynet/blacklist /skynet/blocklist permanent; +rewrite ^/docs(?:/(.*))?$ https://sdk.skynetlabs.com/$1 permanent; location / { include /etc/nginx/conf.d/include/cors; - set $skylink "0404dsjvti046fsua4ktor9grrpe76erq9jot9cvopbhsvsu76r4r30"; - set $path $uri; - set $internal_no_limits "true"; - - include /etc/nginx/conf.d/include/location-skylink; - - proxy_intercept_errors on; - error_page 400 404 490 500 502 503 504 =200 @fallback; -} - -location @fallback { proxy_pass http://website:9000; } -location /docs { - proxy_pass https://skynetlabs.github.io/skynet-docs; -} - location /skynet/blocklist { include /etc/nginx/conf.d/include/cors; + add_header X-Proxy-Cache $upstream_cache_status; + proxy_cache skynet; proxy_cache_valid any 1m; # cache blocklist for 1 minute proxy_set_header User-Agent: Sia-Agent; proxy_pass http://sia:9980/skynet/blocklist; } +location /skynet/portal/blocklist { + include /etc/nginx/conf.d/include/cors; + + add_header X-Proxy-Cache $upstream_cache_status; + + proxy_cache skynet; + proxy_cache_valid 200 204 15m; # cache portal blocklist for 15 minutes + + # 10.10.10.110 points to blocker service + proxy_pass http://10.10.10.110:4000/blocklist; +} + location /skynet/portals { include /etc/nginx/conf.d/include/cors; + add_header X-Proxy-Cache $upstream_cache_status; + proxy_cache skynet; proxy_cache_valid any 1m; # cache portals for 1 minute proxy_set_header User-Agent: Sia-Agent; @@ -65,6 +66,8 @@ location /skynet/portals { location /skynet/stats { include /etc/nginx/conf.d/include/cors; + add_header X-Proxy-Cache $upstream_cache_status; + proxy_cache skynet; 
proxy_cache_valid any 1m; # cache stats for 1 minute proxy_set_header User-Agent: Sia-Agent; @@ -90,6 +93,8 @@ location /serverload { location /skynet/health { include /etc/nginx/conf.d/include/cors; + add_header X-Proxy-Cache $upstream_cache_status; + proxy_cache skynet; proxy_cache_key $request_uri; # use whole request uri (uri + args) as cache key proxy_cache_valid any 1m; # cache responses for 1 minute @@ -169,24 +174,25 @@ location /skynet/registry/subscription { set $notificationdelay "0"; rewrite_by_lua_block { - -- this block runs only when accounts are enabled - if os.getenv("PORTAL_MODULES"):match("a") then - local httpc = require("resty.http").new() + local skynet_account = require("skynet.account") - -- fetch account limits and set download bandwidth and registry delays accordingly - local res, err = httpc:request_uri("http://10.10.10.70:3000/user/limits", { - headers = { ["Cookie"] = "skynet-jwt=" .. ngx.var.skynet_jwt } - }) - - -- fail gracefully in case /user/limits failed - if err or (res and res.status ~= ngx.HTTP_OK) then - ngx.log(ngx.ERR, "Failed accounts service request /user/limits: ", err or ("[HTTP " .. res.status .. "] " .. res.body)) - elseif res and res.status == ngx.HTTP_OK then - local json = require('cjson') - local limits = json.decode(res.body) - ngx.var.bandwidthlimit = limits.download - ngx.var.notificationdelay = limits.registry + if skynet_account.accounts_enabled() then + -- check if portal is in authenticated only mode + if skynet_account.is_access_unauthorized() then + return skynet_account.exit_access_unauthorized() end + + -- check if portal is in subscription only mode + if skynet_account.is_access_forbidden() then + return skynet_account.exit_access_forbidden() + end + + -- get account limits of currently authenticated user + local limits = skynet_account.get_account_limits() + + -- apply bandwidth limit and notification delay + ngx.var.bandwidthlimit = limits.download + ngx.var.notificationdelay = limits.registry end } @@ -235,8 +241,8 @@ location /skynet/tus { limit_conn upload_conn 5; limit_conn upload_conn_rl 1; - # TUS chunks size is 40M + leaving 10M of breathing room - client_max_body_size 50M; + # Do not limit body size in nginx, skyd will reject early on too large upload + client_max_body_size 0; # Those timeouts need to be elevated since skyd can stall reading # data for a while when overloaded which would terminate connection @@ -248,27 +254,28 @@ location /skynet/tus { proxy_set_header X-Forwarded-Proto $scheme; # rewrite proxy request to use correct host uri from env variable (required to return correct location header) - set_by_lua $SKYNET_SERVER_API 'return os.getenv("SKYNET_SERVER_API")'; - proxy_redirect $scheme://$host $SKYNET_SERVER_API; + proxy_redirect $scheme://$host $scheme://$skynet_server_domain; # proxy /skynet/tus requests to siad endpoint with all arguments proxy_pass http://sia:9980; access_by_lua_block { - if require("skynet.account").accounts_enabled() then + local skynet_account = require("skynet.account") + + if skynet_account.accounts_enabled() then -- check if portal is in authenticated only mode - if require("skynet.account").is_access_unauthorized() then - return require("skynet.account").exit_access_unauthorized() + if skynet_account.is_access_unauthorized() then + return skynet_account.exit_access_unauthorized() end -- check if portal is in subscription only mode - if require("skynet.account").is_access_forbidden() then - return require("skynet.account").exit_access_forbidden() + if 
skynet_account.is_access_forbidden() then + return skynet_account.exit_access_forbidden() end - + -- get account limits of currently authenticated user - local limits = require("skynet.account").get_account_limits() - + local limits = skynet_account.get_account_limits() + -- apply upload size limits ngx.req.set_header("SkynetMaxUploadSize", limits.maxUploadSize) end @@ -276,8 +283,8 @@ location /skynet/tus { # extract skylink from base64 encoded upload metadata and assign to a proper header header_filter_by_lua_block { - ngx.header["Skynet-Portal-Api"] = os.getenv("SKYNET_PORTAL_API") - ngx.header["Skynet-Server-Api"] = os.getenv("SKYNET_SERVER_API") + ngx.header["Skynet-Portal-Api"] = ngx.var.scheme .. "://" .. ngx.var.skynet_portal_domain + ngx.header["Skynet-Server-Api"] = ngx.var.scheme .. "://" .. ngx.var.skynet_server_domain if ngx.header["Upload-Metadata"] then local encodedSkylink = string.match(ngx.header["Upload-Metadata"], "Skylink ([^,?]+)") @@ -311,8 +318,8 @@ location /skynet/metadata { include /etc/nginx/conf.d/include/portal-access-check; header_filter_by_lua_block { - ngx.header["Skynet-Portal-Api"] = os.getenv("SKYNET_PORTAL_API") - ngx.header["Skynet-Server-Api"] = os.getenv("SKYNET_SERVER_API") + ngx.header["Skynet-Portal-Api"] = ngx.var.scheme .. "://" .. ngx.var.skynet_portal_domain + ngx.header["Skynet-Server-Api"] = ngx.var.scheme .. "://" .. ngx.var.skynet_server_domain } proxy_set_header User-Agent: Sia-Agent; @@ -324,8 +331,8 @@ location /skynet/resolve { include /etc/nginx/conf.d/include/portal-access-check; header_filter_by_lua_block { - ngx.header["Skynet-Portal-Api"] = os.getenv("SKYNET_PORTAL_API") - ngx.header["Skynet-Server-Api"] = os.getenv("SKYNET_SERVER_API") + ngx.header["Skynet-Portal-Api"] = ngx.var.scheme .. "://" .. ngx.var.skynet_portal_domain + ngx.header["Skynet-Server-Api"] = ngx.var.scheme .. "://" .. 
ngx.var.skynet_server_domain } proxy_set_header User-Agent: Sia-Agent; @@ -348,6 +355,44 @@ location ~ "^/file/(([a-zA-Z0-9-_]{46}|[a-z0-9]{55})(/.*)?)$" { include /etc/nginx/conf.d/include/location-skylink; } +location /skynet/trustless/basesector { + include /etc/nginx/conf.d/include/cors; + include /etc/nginx/conf.d/include/track-download; + + limit_conn downloads_by_ip 100; # ddos protection: max 100 downloads at a time + + # default download rate to unlimited + set $limit_rate 0; + + access_by_lua_block { + local skynet_account = require("skynet.account") + + if skynet_account.accounts_enabled() then + -- check if portal is in authenticated only mode + if skynet_account.is_access_unauthorized() then + return skynet_account.exit_access_unauthorized() + end + + -- check if portal is in subscription only mode + if skynet_account.is_access_forbidden() then + return skynet_account.exit_access_forbidden() + end + + -- get account limits of currently authenticated user + local limits = skynet_account.get_account_limits() + + -- apply download speed limit + ngx.var.limit_rate = limits.download + end + } + + limit_rate_after 512k; + limit_rate $limit_rate; + + proxy_set_header User-Agent: Sia-Agent; + proxy_pass http://sia:9980; +} + location /__internal/do/not/use/accounts { include /etc/nginx/conf.d/include/cors; @@ -357,14 +402,20 @@ location /__internal/do/not/use/accounts { content_by_lua_block { local json = require('cjson') - local accounts_enabled = require("skynet.account").accounts_enabled() - local is_auth_required = require("skynet.account").is_auth_required() - local is_authenticated = accounts_enabled and require("skynet.account").is_authenticated() + local skynet_account = require("skynet.account") + + local accounts_enabled = skynet_account.accounts_enabled() + local is_auth_required = skynet_account.is_auth_required() + local is_subscription_required = skynet_account.is_subscription_required() + local is_authenticated = skynet_account.is_authenticated() + local has_subscription = skynet_account.has_subscription() ngx.say(json.encode{ enabled = accounts_enabled, auth_required = is_auth_required, + subscription_required = is_subscription_required, authenticated = is_authenticated, + subscription = has_subscription, }) return ngx.exit(ngx.HTTP_OK) } diff --git a/docker/nginx/conf.d/server/server.dnslink b/docker/nginx/conf.d/server/server.dnslink index 32e454cc..cf385a1d 100644 --- a/docker/nginx/conf.d/server/server.dnslink +++ b/docker/nginx/conf.d/server/server.dnslink @@ -38,8 +38,6 @@ location / { end ngx.var.skylink = require("skynet.skylink").parse(ngx.var.skylink) - ngx.var.skylink_v1 = ngx.var.skylink - ngx.var.skylink_v2 = ngx.var.skylink } include /etc/nginx/conf.d/include/location-skylink; diff --git a/docker/nginx/conf.d/server/server.hns b/docker/nginx/conf.d/server/server.hns index 3daa167f..9e68dc0b 100644 --- a/docker/nginx/conf.d/server/server.hns +++ b/docker/nginx/conf.d/server/server.hns @@ -1,5 +1,4 @@ listen 443 ssl http2; -listen [::]:443 ssl http2; include /etc/nginx/conf.d/include/ssl-settings; include /etc/nginx/conf.d/include/init-optional-variables; diff --git a/docker/nginx/conf.d/server/server.http b/docker/nginx/conf.d/server/server.http index 77cce00a..22ec6f30 100644 --- a/docker/nginx/conf.d/server/server.http +++ b/docker/nginx/conf.d/server/server.http @@ -1,5 +1,4 @@ listen 80; -listen [::]:80; include /etc/nginx/conf.d/include/init-optional-variables; diff --git a/docker/nginx/conf.d/server/server.local 
b/docker/nginx/conf.d/server/server.local deleted file mode 100644 index 3a8ac118..00000000 --- a/docker/nginx/conf.d/server/server.local +++ /dev/null @@ -1,37 +0,0 @@ -include /etc/nginx/conf.d/include/init-optional-variables; - -location /skynet/blocklist { - client_max_body_size 10m; # increase max body size to account for large lists - client_body_buffer_size 10m; # force whole body to memory so we can read it - - content_by_lua_block { - local httpc = require("resty.http").new() - - ngx.req.read_body() -- ensure the post body data is read before using get_body_data - - -- proxy blocklist update request - -- 10.10.10.10 points to sia service (alias not available when using resty-http) - local res, err = httpc:request_uri("http://10.10.10.10:9980/skynet/blocklist", { - method = "POST", - body = ngx.req.get_body_data(), - headers = { - ["Content-Type"] = "application/x-www-form-urlencoded", - ["Authorization"] = require("skynet.utils").authorization_header(), - ["User-Agent"] = "Sia-Agent", - } - }) - - -- print error and exit with 500 or exit with response if status is not 204 - if err or (res and res.status ~= ngx.HTTP_NO_CONTENT) then - ngx.status = (err and ngx.HTTP_INTERNAL_SERVER_ERROR) or res.status - ngx.header["content-type"] = "text/plain" - ngx.say(err or res.body) - return ngx.exit(ngx.status) - end - - require("skynet.blocklist").reload() - - ngx.status = ngx.HTTP_NO_CONTENT - return ngx.exit(ngx.status) - } -} diff --git a/docker/nginx/conf.d/server/server.skylink b/docker/nginx/conf.d/server/server.skylink index a8f659f1..7f628989 100644 --- a/docker/nginx/conf.d/server/server.skylink +++ b/docker/nginx/conf.d/server/server.skylink @@ -1,5 +1,4 @@ listen 443 ssl http2; -listen [::]:443 ssl http2; include /etc/nginx/conf.d/include/ssl-settings; include /etc/nginx/conf.d/include/init-optional-variables; diff --git a/docker/nginx/libs/basexx.lua b/docker/nginx/libs/basexx.lua index b077ee9a..b53c7337 100644 --- a/docker/nginx/libs/basexx.lua +++ b/docker/nginx/libs/basexx.lua @@ -21,7 +21,7 @@ local function divide_string( str, max ) return result end - + local function number_to_bit( num, length ) local bits = {} @@ -144,7 +144,7 @@ function basexx.to_basexx( str, alphabet, bits, pad ) end table.insert( result, pad ) - return table.concat( result ) + return table.concat( result ) end -------------------------------------------------------------------------------- @@ -225,16 +225,16 @@ local function length_error( len, d ) end local z85Decoder = { 0x00, 0x44, 0x00, 0x54, 0x53, 0x52, 0x48, 0x00, - 0x4B, 0x4C, 0x46, 0x41, 0x00, 0x3F, 0x3E, 0x45, - 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, - 0x08, 0x09, 0x40, 0x00, 0x49, 0x42, 0x4A, 0x47, - 0x51, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, - 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, 0x30, 0x31, 0x32, - 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, - 0x3B, 0x3C, 0x3D, 0x4D, 0x00, 0x4E, 0x43, 0x00, - 0x00, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, - 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, - 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20, + 0x4B, 0x4C, 0x46, 0x41, 0x00, 0x3F, 0x3E, 0x45, + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x40, 0x00, 0x49, 0x42, 0x4A, 0x47, + 0x51, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, + 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, 0x30, 0x31, 0x32, + 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, + 0x3B, 0x3C, 0x3D, 0x4D, 0x00, 0x4E, 0x43, 0x00, + 0x00, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, + 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20, 
0x21, 0x22, 0x23, 0x4F, 0x00, 0x50, 0x00, 0x00 } function basexx.from_z85( str, ignore ) diff --git a/docker/nginx/libs/skynet/account.lua b/docker/nginx/libs/skynet/account.lua index 83ea3ba7..6fa2c4d2 100644 --- a/docker/nginx/libs/skynet/account.lua +++ b/docker/nginx/libs/skynet/account.lua @@ -1,13 +1,46 @@ local _M = {} +-- constant tier ids +local tier_id_anonymous = 0 +local tier_id_free = 1 + -- fallback - remember to keep those updated -local anon_limits = { ["tierName"] = "anonymous", ["upload"] = 655360, ["download"] = 655360, ["maxUploadSize"] = 1073741824, ["registry"] = 250 } +local anon_limits = { + ["tierID"] = tier_id_anonymous, + ["tierName"] = "anonymous", + ["upload"] = 655360, + ["download"] = 655360, + ["maxUploadSize"] = 1073741824, + ["registry"] = 250 +} --- no limits applied -local no_limits = { ["tierName"] = "internal", ["upload"] = 0, ["download"] = 0, ["maxUploadSize"] = 0, ["registry"] = 0 } +-- get all non empty authentication headers from request, we want to return +-- all of them and let accounts service deal with validation and prioritisation +function _M.get_auth_headers() + local utils = require("utils") + local request_headers = ngx.req.get_headers() + local headers = {} --- free tier name -local free_tier = "free" + -- try to extract skynet-jwt cookie from cookie header + local skynet_jwt_cookie = utils.extract_cookie(request_headers["Cookie"], "skynet[-]jwt") + + -- if skynet-jwt cookie is present, pass it as is + if skynet_jwt_cookie then + headers["Cookie"] = skynet_jwt_cookie + end + + -- if authorization header is set, pass it as is + if request_headers["Authorization"] then + headers["Authorization"] = request_headers["Authorization"] + end + + -- if skynet api key header is set, pass it as is + if request_headers["Skynet-Api-Key"] then + headers["Skynet-Api-Key"] = request_headers["Skynet-Api-Key"] + end + + return headers +end -- handle request exit when access to portal should be restricted to authenticated users only function _M.exit_access_unauthorized(message) @@ -31,26 +64,26 @@ end function _M.get_account_limits() local cjson = require('cjson') + local utils = require('utils') + local auth_headers = _M.get_auth_headers() - if ngx.var.internal_no_limits == "true" then - return no_limits - end - - if ngx.var.skynet_jwt == "" then + -- simple case of anonymous request - none of available auth headers exist + if utils.is_table_empty(auth_headers) then return anon_limits end if ngx.var.account_limits == "" then local httpc = require("resty.http").new() - + -- 10.10.10.70 points to accounts service (alias not available when using resty-http) - local res, err = httpc:request_uri("http://10.10.10.70:3000/user/limits", { - headers = { ["Cookie"] = "skynet-jwt=" .. ngx.var.skynet_jwt } + local res, err = httpc:request_uri("http://10.10.10.70:3000/user/limits?unit=byte", { + headers = auth_headers, }) - + -- fail gracefully in case /user/limits failed if err or (res and res.status ~= ngx.HTTP_OK) then - ngx.log(ngx.ERR, "Failed accounts service request /user/limits: ", err or ("[HTTP " .. res.status .. "] " .. res.body)) + local error_response = err or ("[HTTP " .. res.status .. "] " .. 
res.body) + ngx.log(ngx.ERR, "Failed accounts service request /user/limits?unit=byte: ", error_response) ngx.var.account_limits = cjson.encode(anon_limits) elseif res and res.status == ngx.HTTP_OK then ngx.var.account_limits = res.body @@ -62,27 +95,31 @@ end -- detect whether current user is authenticated function _M.is_authenticated() + if not _M.accounts_enabled() then return false end + local limits = _M.get_account_limits() - return limits.tierName ~= anon_limits.tierName + return limits.tierID > tier_id_anonymous end -- detect whether current user has active subscription -function _M.is_subscription_account() +function _M.has_subscription() local limits = _M.get_account_limits() - return limits.tierName ~= anon_limits.tierName and limits.tierName ~= free_tier + return limits.tierID > tier_id_free end function _M.is_auth_required() - return os.getenv("ACCOUNTS_LIMIT_ACCESS") == "authenticated" + -- authentication is required if mode is set to "authenticated" + -- or "subscription" (require active subscription to a premium plan) + return os.getenv("ACCOUNTS_LIMIT_ACCESS") == "authenticated" or _M.is_subscription_required() end function _M.is_subscription_required() return os.getenv("ACCOUNTS_LIMIT_ACCESS") == "subscription" end -function is_access_always_allowed() +local is_access_always_allowed = function () -- options requests do not attach cookies - should always be available -- requests should not be limited based on accounts if accounts are not enabled return ngx.req.get_method() == "OPTIONS" or not _M.accounts_enabled() @@ -101,7 +138,7 @@ function _M.is_access_forbidden() if is_access_always_allowed() then return false end -- check if active subscription is required and request is from user without it - return _M.is_subscription_required() and not _M.is_subscription_account() + return _M.is_subscription_required() and not _M.has_subscription() end return _M diff --git a/docker/nginx/libs/skynet/blocklist.lua b/docker/nginx/libs/skynet/blocklist.lua deleted file mode 100644 index 29f53032..00000000 --- a/docker/nginx/libs/skynet/blocklist.lua +++ /dev/null @@ -1,66 +0,0 @@ -local _M = {} - -function _M.reload() - local httpc = require("resty.http").new() - - -- fetch blocklist records (all blocked skylink hashes) - -- 10.10.10.10 points to sia service (alias not available when using resty-http) - local res, err = httpc:request_uri("http://10.10.10.10:9980/skynet/blocklist", { - headers = { - ["User-Agent"] = "Sia-Agent", - } - }) - - -- fail whole request in case this request failed, we want to make sure - -- the blocklist is pre cached before serving first skylink - if err or (res and res.status ~= ngx.HTTP_OK) then - ngx.log(ngx.ERR, "Failed skyd service request /skynet/blocklist: ", err or ("[HTTP " .. res.status .. "] " .. 
res.body)) - ngx.status = (err and ngx.HTTP_INTERNAL_SERVER_ERROR) or res.status - ngx.header["content-type"] = "text/plain" - ngx.say(err or res.body) - return ngx.exit(ngx.status) - elseif res and res.status == ngx.HTTP_OK then - local json = require('cjson') - local data = json.decode(res.body) - - -- mark all existing entries as expired - ngx.shared.blocklist:flush_all() - - -- check if blocklist is table (it is null when empty) - if type(data.blocklist) == "table" then - -- set all cache entries one by one (resets expiration) - for i, hash in ipairs(data.blocklist) do - ngx.shared.blocklist:set(hash, true) - end - end - - -- ensure that init flag is persisted - ngx.shared.blocklist:set("__init", true) - - -- remove all leftover expired entries - ngx.shared.blocklist:flush_expired() - end -end - -function _M.is_blocked(skylink) - -- make sure that blocklist has been preloaded - if not ngx.shared.blocklist:get("__init") then _M.reload() end - - -- hash skylink before comparing it with blocklist - local hash = require("skynet.skylink").hash(skylink) - - -- we need to use get_stale because we are expiring previous - -- entries when the blocklist is reloading and we still want - -- to block them until the reloading is finished - return ngx.shared.blocklist:get_stale(hash) == true -end - --- exit with 416 illegal content status code -function _M.exit_illegal() - ngx.status = ngx.HTTP_ILLEGAL - ngx.header["content-type"] = "text/plain" - ngx.say("Unavailable For Legal Reasons") - return ngx.exit(ngx.status) -end - -return _M diff --git a/docker/nginx/libs/skynet/skylink.lua b/docker/nginx/libs/skynet/skylink.lua index adcf0b70..86d1c4bc 100644 --- a/docker/nginx/libs/skynet/skylink.lua +++ b/docker/nginx/libs/skynet/skylink.lua @@ -27,7 +27,7 @@ function _M.hash(skylink) -- parse with blake2b with key length of 32 local blake2bHashed = hasher.blake2b(rawMerkleRoot, 32) - + -- hex encode the blake hash local hexHashed = basexx.to_hex(blake2bHashed) diff --git a/docker/nginx/libs/skynet/skylink.spec.lua b/docker/nginx/libs/skynet/skylink.spec.lua index 0502a833..9977d7c8 100644 --- a/docker/nginx/libs/skynet/skylink.spec.lua +++ b/docker/nginx/libs/skynet/skylink.spec.lua @@ -7,7 +7,7 @@ describe("parse", function() it("should return unchanged base64 skylink", function() assert.is.same(skynet_skylink.parse(base64), base64) end) - + it("should transform base32 skylink into base64", function() assert.is.same(skynet_skylink.parse(base32), base64) end) diff --git a/docker/nginx/libs/utils.lua b/docker/nginx/libs/utils.lua new file mode 100644 index 00000000..4330c94c --- /dev/null +++ b/docker/nginx/libs/utils.lua @@ -0,0 +1,45 @@ +local _M = {} + +-- utility function for checking if table is empty +function _M.is_table_empty(check) + -- bind next to local variable to achieve ultimate efficiency + -- https://stackoverflow.com/a/1252776 + local next = next + + return next(check) == nil +end + +-- extract full cookie name and value by its name from cookie string +-- note: name matcher argument is a pattern so you will need to escape +-- any special characters, read more https://www.lua.org/pil/20.2.html +function _M.extract_cookie(cookie_string, name_matcher) + -- nil cookie string safeguard + if cookie_string == nil then + return nil + end + + local start, stop = string.find(cookie_string, name_matcher .. 
"=[^;]+") + + if start then + return string.sub(cookie_string, start, stop) + end + + return nil +end + +-- extract just the cookie value by its name from cookie string +-- note: name matcher argument is a pattern so you will need to escape +-- any special characters, read more https://www.lua.org/pil/20.2.html +function _M.extract_cookie_value(cookie_string, name_matcher) + local cookie = _M.extract_cookie(cookie_string, name_matcher) + + if cookie == nil then + return nil + end + + local value_start = string.find(cookie, "=") + 1 + + return string.sub(cookie, value_start) +end + +return _M diff --git a/docker/nginx/libs/utils.spec.lua b/docker/nginx/libs/utils.spec.lua new file mode 100644 index 00000000..c853c8cd --- /dev/null +++ b/docker/nginx/libs/utils.spec.lua @@ -0,0 +1,79 @@ +local utils = require('utils') + +describe("is_table_empty", function() + it("should return true for empty table", function() + assert.is_true(utils.is_table_empty({})) + end) + + it("should return false for not empty table", function() + assert.is_false(utils.is_table_empty({ ["foo"] = "bar" })) + end) +end) + +describe("extract_cookie", function() + local cookie_string = "aaa=bbb; skynet-jwt=MTY0NzUyr8jD-ytiWtspm0tGabKfooxeIDuWcXhJ3lnY0eEw==; xxx=yyy" + + it("should return nil if cookie string is nil", function() + local cookie = utils.extract_cookie_value(nil, "aaa") + + assert.is_nil(cookie) + end) + + it("should return nil if cookie name is not found", function() + local cookie = utils.extract_cookie(cookie_string, "foo") + + assert.is_nil(cookie) + end) + + it("should return cookie if cookie_string starts with that cookie name", function() + local cookie = utils.extract_cookie(cookie_string, "aaa") + + assert.are.equals(cookie, "aaa=bbb") + end) + + it("should return cookie if cookie_string ends with that cookie name", function() + local cookie = utils.extract_cookie(cookie_string, "xxx") + + assert.are.equals(cookie, "xxx=yyy") + end) + + it("should return cookie with custom matcher", function() + local cookie = utils.extract_cookie(cookie_string, "skynet[-]jwt") + + assert.are.equals(cookie, "skynet-jwt=MTY0NzUyr8jD-ytiWtspm0tGabKfooxeIDuWcXhJ3lnY0eEw==") + end) +end) + +describe("extract_cookie_value", function() + local cookie_string = "aaa=bbb; skynet-jwt=MTY0NzUyr8jD-ytiWtspm0tGabKfooxeIDuWcXhJ3lnY0eEw==; xxx=yyy" + + it("should return nil if cookie string is nil", function() + local value = utils.extract_cookie_value(nil, "aaa") + + assert.is_nil(value) + end) + + it("should return nil if cookie name is not found", function() + local value = utils.extract_cookie_value(cookie_string, "foo") + + assert.is_nil(value) + end) + + it("should return value if cookie_string starts with that cookie name", function() + local value = utils.extract_cookie_value(cookie_string, "aaa") + + assert.are.equals(value, "bbb") + end) + + it("should return cookie if cookie_string ends with that cookie name", function() + local value = utils.extract_cookie_value(cookie_string, "xxx") + + assert.are.equals(value, "yyy") + end) + + it("should return cookie with custom matcher", function() + local value = utils.extract_cookie_value(cookie_string, "skynet[-]jwt") + + assert.are.equals(value, "MTY0NzUyr8jD-ytiWtspm0tGabKfooxeIDuWcXhJ3lnY0eEw==") + end) +end) diff --git a/docker/nginx/nginx.conf b/docker/nginx/nginx.conf index eb5494c9..3517a6bc 100644 --- a/docker/nginx/nginx.conf +++ b/docker/nginx/nginx.conf @@ -26,8 +26,8 @@ worker_processes auto; #pid logs/nginx.pid; # declare env variables to use it in config -env 
SKYNET_PORTAL_API; -env SKYNET_SERVER_API; +env PORTAL_DOMAIN; +env SERVER_DOMAIN; env PORTAL_MODULES; env ACCOUNTS_LIMIT_ACCESS; env SIA_API_PASSWORD; @@ -49,7 +49,7 @@ http { '"$upstream_http_content_type" "$upstream_cache_status" ' '"$server_alias" "$sent_http_skynet_skylink" ' '$upstream_connect_time $upstream_header_time ' - '$request_time "$hns_domain" "$skylink"'; + '$request_time "$hns_domain" "$skylink" $upstream_http_skynet_cache_ratio'; access_log logs/access.log main; @@ -74,28 +74,18 @@ http { # proxy cache definition proxy_cache_path /data/nginx/cache levels=1:2 keys_zone=skynet:10m max_size=50g min_free=100g inactive=48h use_temp_path=off; - # create a shared blocklist dictionary with size of 30 megabytes - # estimated capacity of 1 megabyte dictionary is 3500 blocklist entries - # that gives us capacity of around 100k entries in 30 megabyte dictionary - lua_shared_dict blocklist 30m; - - # create a shared dictionary to fill with skylinks that should not - # be cached due to the large size or some other reasons - lua_shared_dict nocache 10m; - # this runs before forking out nginx worker processes init_by_lua_block { require "cjson" require "resty.http" - require "skynet.blocklist" require "skynet.skylink" require "skynet.utils" } # include skynet-portal-api and skynet-server-api header on every request header_filter_by_lua_block { - ngx.header["Skynet-Portal-Api"] = os.getenv("SKYNET_PORTAL_API") - ngx.header["Skynet-Server-Api"] = os.getenv("SKYNET_SERVER_API") + ngx.header["Skynet-Portal-Api"] = ngx.var.scheme .. "://" .. ngx.var.skynet_portal_domain + ngx.header["Skynet-Server-Api"] = ngx.var.scheme .. "://" .. ngx.var.skynet_server_domain } # ratelimit specified IPs @@ -127,13 +117,6 @@ http { proxy_set_header X-Forwarded-Host $host; proxy_set_header X-Forwarded-Proto $scheme; - # skynet-jwt contains dash so we cannot use $cookie_skynet-jwt - # https://richardhart.me/2012/03/18/logging-nginx-cookies-with-dashes/ - map $http_cookie $skynet_jwt { - default ''; - ~skynet-jwt=(?[^\;]+) $match; - } - include /etc/nginx/conf.d/*.conf; include /etc/nginx/conf.extra.d/*.conf; } diff --git a/docker/sia/Dockerfile b/docker/sia/Dockerfile index 887b92e9..953dd27c 100644 --- a/docker/sia/Dockerfile +++ b/docker/sia/Dockerfile @@ -5,12 +5,12 @@ ENV GOARCH amd64 ARG branch=portal-latest -RUN git clone https://gitlab.com/SkynetLabs/skyd.git Sia --single-branch --branch ${branch} -RUN make release --directory Sia +RUN git clone https://gitlab.com/SkynetLabs/skyd.git Sia --single-branch --branch ${branch} && \ + make release --directory Sia -FROM nebulouslabs/sia:latest +FROM nebulouslabs/sia:1.5.6 COPY --from=sia-builder /go/bin/ /usr/bin/ -RUN mv /usr/bin/skyd /usr/bin/siad || true && \ - mv /usr/bin/skyc /usr/bin/siac || true +RUN if [ -f "/usr/bin/skyd" ]; then mv /usr/bin/skyd /usr/bin/siad; fi && \ + if [ -f "/usr/bin/skyc" ]; then mv /usr/bin/skyc /usr/bin/siac; fi diff --git a/packages/dashboard-v2/.eslintignore b/packages/dashboard-v2/.eslintignore new file mode 100644 index 00000000..65ea287b --- /dev/null +++ b/packages/dashboard-v2/.eslintignore @@ -0,0 +1,4 @@ +node_modules/ +.cache/ +public/ +storybook-build/ diff --git a/packages/dashboard-v2/.eslintrc.js b/packages/dashboard-v2/.eslintrc.js new file mode 100644 index 00000000..51d8f9b5 --- /dev/null +++ b/packages/dashboard-v2/.eslintrc.js @@ -0,0 +1,6 @@ +module.exports = { + globals: { + __PATH_PREFIX__: true, + }, + extends: ["react-app", "plugin:storybook/recommended"], +}; diff --git 
a/packages/dashboard-v2/.gitignore b/packages/dashboard-v2/.gitignore new file mode 100644 index 00000000..65ea287b --- /dev/null +++ b/packages/dashboard-v2/.gitignore @@ -0,0 +1,4 @@ +node_modules/ +.cache/ +public/ +storybook-build/ diff --git a/packages/dashboard-v2/.prettierignore b/packages/dashboard-v2/.prettierignore new file mode 100644 index 00000000..65ea287b --- /dev/null +++ b/packages/dashboard-v2/.prettierignore @@ -0,0 +1,4 @@ +node_modules/ +.cache/ +public/ +storybook-build/ diff --git a/packages/dashboard-v2/.prettierrc.json b/packages/dashboard-v2/.prettierrc.json new file mode 100644 index 00000000..963354f2 --- /dev/null +++ b/packages/dashboard-v2/.prettierrc.json @@ -0,0 +1,3 @@ +{ + "printWidth": 120 +} diff --git a/packages/dashboard-v2/.storybook/main.js b/packages/dashboard-v2/.storybook/main.js new file mode 100644 index 00000000..8a1198a4 --- /dev/null +++ b/packages/dashboard-v2/.storybook/main.js @@ -0,0 +1,19 @@ +module.exports = { + stories: ["../src/**/*.stories.@(js|jsx|ts|tsx)"], + addons: [ + "@storybook/addon-links", + "@storybook/addon-essentials", + "storybook-addon-gatsby", + { + name: "@storybook/addon-postcss", + options: { + postcssLoaderOptions: { + implementation: require("postcss"), + }, + }, + }, + ], + core: { + builder: "webpack5", + }, +}; diff --git a/packages/dashboard-v2/.storybook/preview.js b/packages/dashboard-v2/.storybook/preview.js new file mode 100644 index 00000000..de9fb8cb --- /dev/null +++ b/packages/dashboard-v2/.storybook/preview.js @@ -0,0 +1,20 @@ +import "tailwindcss/tailwind.css"; +import "@fontsource/sora/300.css"; // light +import "@fontsource/sora/400.css"; // normal +import "@fontsource/sora/500.css"; // medium +import "@fontsource/sora/600.css"; // semibold +import "@fontsource/source-sans-pro/400.css"; // normal +import "@fontsource/source-sans-pro/600.css"; // semibold + +import "../src/styles/global.css"; + +export const parameters = { + actions: { argTypesRegex: "^on[A-Z].*" }, + controls: { + matchers: { + color: /(background|color)$/i, + date: /Date$/, + }, + }, + layout: "fullscreen", +}; diff --git a/packages/dashboard-v2/Dockerfile b/packages/dashboard-v2/Dockerfile new file mode 100644 index 00000000..70790cfa --- /dev/null +++ b/packages/dashboard-v2/Dockerfile @@ -0,0 +1,14 @@ +FROM node:16.14.2-alpine + +WORKDIR /usr/app + +COPY package.json yarn.lock ./ + +RUN yarn --frozen-lockfile + +COPY static ./static +COPY src ./src +COPY gatsby*.js ./ +COPY postcss.config.js tailwind.config.js ./ + +CMD ["sh", "-c", "yarn build && yarn serve --host 0.0.0.0 -p 9000"] diff --git a/packages/dashboard-v2/README.md b/packages/dashboard-v2/README.md new file mode 100644 index 00000000..ab0421f8 --- /dev/null +++ b/packages/dashboard-v2/README.md @@ -0,0 +1,25 @@ +# Skynet Account Dashboard + +Code behind [account.skynetpro.net](https://account.skynetpro.net/) + +## Development + +This is a Gatsby application. To run it locally, all you need is: + +- `yarn install` +- `yarn start` + +## Accessing remote APIs + +To be able to log in on a local environment with your production credentials, you'll need to make the browser believe you're actually on the same domain, otherwise the browser will block the session cookie. + +To do the trick, edit your `/etc/hosts` file and add a record like this: + +``` +127.0.0.1 local.skynetpro.net +``` + +then run `yarn develop:secure` -- it will run `gatsby develop` with `--https --host=local.skynetpro.net -p=443` options. 
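API requests issued by the locally served dashboard are forwarded to the remote portal by the `developMiddleware` proxy registered in `gatsby-config.js` (added later in this changeset). A minimal sketch of that wiring (the target host is an example value; point it at the portal you log in to):

```js
// gatsby-config.js (sketch): proxy /api/ requests from the local dev server to a
// remote portal, so the dashboard running locally talks to a real accounts API.
const { createProxyMiddleware } = require("http-proxy-middleware");

module.exports = {
  developMiddleware: (app) => {
    app.use(
      "/api/",
      createProxyMiddleware({
        target: "https://account.skynetpro.net", // example portal, adjust to your environment
        secure: false, // do not reject self-signed certificates
        changeOrigin: true,
      })
    );
  },
};
```

With the proxy in place, the browser issues same-origin `/api/` calls while the responses actually come from the remote portal.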
+If you're on macOS, you may need to `sudo` the command to successfully bind to port `443`. + +> **NOTE:** This should become easier once we have a docker image for the new dashboard. diff --git a/packages/dashboard-v2/gatsby-browser.js b/packages/dashboard-v2/gatsby-browser.js new file mode 100644 index 00000000..79b58e24 --- /dev/null +++ b/packages/dashboard-v2/gatsby-browser.js @@ -0,0 +1,22 @@ +import * as React from "react"; +import "@fontsource/sora/300.css"; // light +import "@fontsource/sora/400.css"; // normal +import "@fontsource/sora/500.css"; // medium +import "@fontsource/sora/600.css"; // semibold +import "@fontsource/source-sans-pro/400.css"; // normal +import "@fontsource/source-sans-pro/600.css"; // semibold +import "./src/styles/global.css"; +import { MODAL_ROOT_ID } from "./src/components/Modal"; +import { PortalSettingsProvider } from "./src/contexts/portal-settings"; + +export function wrapPageElement({ element, props }) { + const Layout = element.type.Layout ?? React.Fragment; + return ( + + + {element} +
+ + + ); +} diff --git a/packages/dashboard-v2/gatsby-config.js b/packages/dashboard-v2/gatsby-config.js new file mode 100644 index 00000000..2280f99b --- /dev/null +++ b/packages/dashboard-v2/gatsby-config.js @@ -0,0 +1,36 @@ +const { createProxyMiddleware } = require("http-proxy-middleware"); + +module.exports = { + siteMetadata: { + title: "Skynet Account", + siteUrl: `https://account.${process.env.GATSBY_PORTAL_DOMAIN}/`, + }, + trailingSlash: "never", + plugins: [ + "gatsby-plugin-image", + "gatsby-plugin-provide-react", + "gatsby-plugin-react-helmet", + "gatsby-plugin-sharp", + "gatsby-transformer-sharp", + "gatsby-plugin-styled-components", + "gatsby-plugin-postcss", + { + resolve: "gatsby-source-filesystem", + options: { + name: "images", + path: "./static/images/", + }, + __key: "images", + }, + ], + developMiddleware: (app) => { + app.use( + "/api/", + createProxyMiddleware({ + target: "https://account.skynetpro.net", + secure: false, // Do not reject self-signed certificates. + changeOrigin: true, + }) + ); + }, +}; diff --git a/packages/dashboard-v2/gatsby-ssr.js b/packages/dashboard-v2/gatsby-ssr.js new file mode 100644 index 00000000..79b58e24 --- /dev/null +++ b/packages/dashboard-v2/gatsby-ssr.js @@ -0,0 +1,22 @@ +import * as React from "react"; +import "@fontsource/sora/300.css"; // light +import "@fontsource/sora/400.css"; // normal +import "@fontsource/sora/500.css"; // medium +import "@fontsource/sora/600.css"; // semibold +import "@fontsource/source-sans-pro/400.css"; // normal +import "@fontsource/source-sans-pro/600.css"; // semibold +import "./src/styles/global.css"; +import { MODAL_ROOT_ID } from "./src/components/Modal"; +import { PortalSettingsProvider } from "./src/contexts/portal-settings"; + +export function wrapPageElement({ element, props }) { + const Layout = element.type.Layout ?? React.Fragment; + return ( + + + {element} +
+ + + ); +} diff --git a/packages/dashboard-v2/package.json b/packages/dashboard-v2/package.json new file mode 100644 index 00000000..b760bf48 --- /dev/null +++ b/packages/dashboard-v2/package.json @@ -0,0 +1,81 @@ +{ + "name": "accounts-dashboard", + "version": "1.0.0", + "private": true, + "description": "Accounts Dashboard", + "author": "Skynet Labs", + "keywords": [ + "gatsby" + ], + "scripts": { + "develop": "gatsby develop", + "develop:secure": "gatsby develop --https --host=local.skynetpro.net -p=443", + "start": "gatsby develop", + "build": "gatsby build", + "serve": "gatsby serve", + "clean": "gatsby clean", + "lint": "eslint .", + "prettier": "prettier .", + "storybook": "start-storybook -p 6006", + "build-storybook": "build-storybook -o storybook-build" + }, + "dependencies": { + "@fontsource/sora": "^4.5.3", + "@fontsource/source-sans-pro": "^4.5.3", + "classnames": "^2.3.1", + "copy-text-to-clipboard": "^3.0.1", + "dayjs": "^1.10.8", + "formik": "^2.2.9", + "gatsby": "^4.6.2", + "gatsby-plugin-postcss": "^5.7.0", + "http-status-codes": "^2.2.0", + "ky": "^0.30.0", + "nanoid": "^3.3.1", + "path-browserify": "^1.0.1", + "postcss": "^8.4.6", + "pretty-bytes": "^6.0.0", + "react": "^17.0.1", + "react-dom": "^17.0.1", + "react-dropzone": "^12.0.4", + "react-helmet": "^6.1.0", + "react-use": "^17.3.2", + "skynet-js": "4.0.27-beta", + "swr": "^1.2.2", + "tailwindcss": "^3.0.23", + "yup": "^0.32.11" + }, + "devDependencies": { + "@babel/core": "^7.17.4", + "@storybook/addon-actions": "^6.4.19", + "@storybook/addon-essentials": "^6.4.19", + "@storybook/addon-interactions": "^6.4.19", + "@storybook/addon-links": "^6.4.19", + "@storybook/addon-postcss": "^2.0.0", + "@storybook/builder-webpack5": "^6.4.19", + "@storybook/manager-webpack5": "^6.4.19", + "@storybook/react": "^6.4.19", + "@storybook/testing-library": "^0.0.9", + "autoprefixer": "^10.4.2", + "babel-eslint": "^10.1.0", + "babel-loader": "^8.2.3", + "babel-plugin-preval": "^5.1.0", + "babel-plugin-styled-components": "^2.0.2", + "eslint": "^8.9.0", + "eslint-config-react-app": "^7.0.0", + "eslint-plugin-storybook": "^0.5.6", + "gatsby-plugin-alias-imports": "^1.0.5", + "gatsby-plugin-image": "^2.6.0", + "gatsby-plugin-preval": "^1.0.0", + "gatsby-plugin-provide-react": "^1.0.2", + "gatsby-plugin-react-helmet": "^5.6.0", + "gatsby-plugin-sharp": "^4.6.0", + "gatsby-plugin-styled-components": "^5.8.0", + "gatsby-source-filesystem": "^4.6.0", + "gatsby-transformer-sharp": "^4.6.0", + "http-proxy-middleware": "^1.3.1", + "prettier": "2.5.1", + "react-is": "^17.0.2", + "storybook-addon-gatsby": "^0.0.5", + "styled-components": "^5.3.3" + } +} diff --git a/packages/dashboard-v2/postcss.config.js b/packages/dashboard-v2/postcss.config.js new file mode 100644 index 00000000..3b35b010 --- /dev/null +++ b/packages/dashboard-v2/postcss.config.js @@ -0,0 +1,3 @@ +module.exports = { + plugins: [require("tailwindcss/nesting"), require("tailwindcss"), require("autoprefixer")], +}; diff --git a/packages/dashboard-v2/src/components/APIKeyList/APIKey.js b/packages/dashboard-v2/src/components/APIKeyList/APIKey.js new file mode 100644 index 00000000..3269bb9f --- /dev/null +++ b/packages/dashboard-v2/src/components/APIKeyList/APIKey.js @@ -0,0 +1,155 @@ +import dayjs from "dayjs"; +import cn from "classnames"; +import { useCallback, useState } from "react"; + +import { Alert } from "../Alert"; +import { Button } from "../Button"; +import { AddSkylinkToAPIKeyForm } from "../forms/AddSkylinkToAPIKeyForm"; +import { CogIcon, TrashIcon } from 
"../Icons"; +import { Modal } from "../Modal"; + +import { useAPIKeyEdit } from "./useAPIKeyEdit"; +import { useAPIKeyRemoval } from "./useAPIKeyRemoval"; + +export const APIKey = ({ apiKey, onRemoved, onEdited, onRemovalError }) => { + const { id, name, createdAt, skylinks } = apiKey; + const isPublic = apiKey.public === "true"; + const [error, setError] = useState(null); + + const onSkylinkListEdited = useCallback(() => { + setError(null); + onEdited(); + }, [onEdited]); + + const onSkylinkListEditFailure = (errorMessage) => setError(errorMessage); + + const { + removalError, + removalInitiated, + prompt: promptRemoval, + abort: abortRemoval, + confirm: confirmRemoval, + } = useAPIKeyRemoval({ + key: apiKey, + onSuccess: onRemoved, + onFailure: onRemovalError, + }); + + const { + editInitiated, + prompt: promptEdit, + abort: abortEdit, + addSkylink, + removeSkylink, + } = useAPIKeyEdit({ + key: apiKey, + onSkylinkListUpdate: onSkylinkListEdited, + onSkylinkListUpdateFailure: onSkylinkListEditFailure, + }); + + const closeEditModal = useCallback(() => { + setError(null); + abortEdit(); + }, [abortEdit]); + + const skylinksNumber = skylinks?.length ?? 0; + const isNotConfigured = isPublic && skylinksNumber === 0; + const skylinksPhrasePrefix = skylinksNumber === 0 ? "No" : skylinksNumber; + const skylinksPhrase = `${skylinksPhrasePrefix} ${skylinksNumber === 1 ? "skylink" : "skylinks"} configured`; + + return ( +
  • + + + {name || "unnamed key"} + + + + + {dayjs(createdAt).format("MMM DD, YYYY")} + + {isPublic && ( + + )} + + + + {removalInitiated && ( + +

    Delete API key

    +
    +

    Are you sure you want to delete the following API key?

    +

    {name || id}

    +
    + {removalError && {removalError}} + +
    + + +
    +
    + )} + {editInitiated && ( + +

    Covered skylinks

    + {skylinks?.length > 0 ? ( +
      + {skylinks.map((skylink) => ( +
    • + + {skylink} + + +
    • + ))} +
    + ) : ( + No skylinks here yet. You can add the first one below 🙃 + )} + +
    + {error && {error}} + +
    +
    + +
    +
    + )} +
  • + ); +}; diff --git a/packages/dashboard-v2/src/components/APIKeyList/APIKeyList.js b/packages/dashboard-v2/src/components/APIKeyList/APIKeyList.js new file mode 100644 index 00000000..3d3e504d --- /dev/null +++ b/packages/dashboard-v2/src/components/APIKeyList/APIKeyList.js @@ -0,0 +1,14 @@ +import { APIKey } from "./APIKey"; + +export const APIKeyList = ({ keys, reloadKeys, title }) => { + return ( + <> +
    {title}
    +
      + {keys.map((key) => ( + + ))} +
+ + + ); +}; diff --git a/packages/dashboard-v2/src/components/APIKeyList/index.js b/packages/dashboard-v2/src/components/APIKeyList/index.js new file mode 100644 index 00000000..8ade7744 --- /dev/null +++ b/packages/dashboard-v2/src/components/APIKeyList/index.js @@ -0,0 +1 @@ +export * from "./APIKeyList"; diff --git a/packages/dashboard-v2/src/components/APIKeyList/useAPIKeyEdit.js b/packages/dashboard-v2/src/components/APIKeyList/useAPIKeyEdit.js new file mode 100644 index 00000000..a821ca02 --- /dev/null +++ b/packages/dashboard-v2/src/components/APIKeyList/useAPIKeyEdit.js @@ -0,0 +1,43 @@ +import { useCallback, useState } from "react"; +import accountsService from "../../services/accountsService"; + +export const useAPIKeyEdit = ({ key, onSkylinkListUpdate, onSkylinkListUpdateFailure }) => { + const [editInitiated, setEditInitiated] = useState(false); + + const prompt = () => setEditInitiated(true); + const abort = () => setEditInitiated(false); + const updateSkylinkList = useCallback( + async (action, skylink) => { + try { + await accountsService.patch(`user/apikeys/${key.id}`, { + json: { + [action]: [skylink], + }, + }); + onSkylinkListUpdate(); + + return true; + } catch (err) { + if (err.response) { + const { message } = await err.response.json(); + onSkylinkListUpdateFailure(message); + } else { + onSkylinkListUpdateFailure("Unknown error occurred, please try again."); + } + + return false; + } + }, + [onSkylinkListUpdate, onSkylinkListUpdateFailure, key] + ); + const addSkylink = (skylink) => updateSkylinkList("add", skylink); + const removeSkylink = (skylink) => updateSkylinkList("remove", skylink); + + return { + editInitiated, + prompt, + abort, + addSkylink, + removeSkylink, + }; +}; diff --git a/packages/dashboard-v2/src/components/APIKeyList/useAPIKeyRemoval.js b/packages/dashboard-v2/src/components/APIKeyList/useAPIKeyRemoval.js new file mode 100644 index 00000000..b9c53bd9 --- /dev/null +++ b/packages/dashboard-v2/src/components/APIKeyList/useAPIKeyRemoval.js @@ -0,0 +1,41 @@ +import { useCallback, useState } from "react"; +import accountsService from "../../services/accountsService"; + +export const useAPIKeyRemoval = ({ key, onSuccess }) => { + const [removalInitiated, setRemovalInitiated] = useState(false); + const [removalError, setRemovalError] = useState(null); + + const prompt = () => { + setRemovalError(null); + setRemovalInitiated(true); + }; + const abort = () => setRemovalInitiated(false); + + const confirm = useCallback(async () => { + setRemovalError(null); + try { + await accountsService.delete(`user/apikeys/${key.id}`); + setRemovalInitiated(false); + onSuccess(); + } catch (err) { + let message = "There was an error processing your request. 
Please try again later."; + + if (err.response) { + const response = await err.response.json(); + if (response.message) { + message = response.message; + } + } + + setRemovalError(message); + } + }, [onSuccess, key]); + + return { + removalInitiated, + removalError, + prompt, + abort, + confirm, + }; +}; diff --git a/packages/dashboard-v2/src/components/Alert/Alert.js b/packages/dashboard-v2/src/components/Alert/Alert.js new file mode 100644 index 00000000..4db72620 --- /dev/null +++ b/packages/dashboard-v2/src/components/Alert/Alert.js @@ -0,0 +1,10 @@ +import styled from "styled-components"; +import cn from "classnames"; + +export const Alert = styled.div.attrs(({ $variant }) => ({ + className: cn("px-3 py-2 sm:px-6 sm:py-4 rounded border", { + "bg-blue-100 border-blue-200 text-palette-400": $variant === "info", + "bg-red-100 border-red-200 text-error": $variant === "error", + "bg-green-100 border-green-200 text-palette-400": $variant === "success", + }), +}))``; diff --git a/packages/dashboard-v2/src/components/Alert/index.js b/packages/dashboard-v2/src/components/Alert/index.js new file mode 100644 index 00000000..b8e17a03 --- /dev/null +++ b/packages/dashboard-v2/src/components/Alert/index.js @@ -0,0 +1 @@ +export * from "./Alert"; diff --git a/packages/dashboard-v2/src/components/AvatarUploader/AvatarUploader.js b/packages/dashboard-v2/src/components/AvatarUploader/AvatarUploader.js new file mode 100644 index 00000000..9f5bbc82 --- /dev/null +++ b/packages/dashboard-v2/src/components/AvatarUploader/AvatarUploader.js @@ -0,0 +1,35 @@ +import { useEffect, useState } from "react"; + +import { useUser } from "../../contexts/user"; +import { SimpleUploadIcon } from "../Icons"; + +const AVATAR_PLACEHOLDER = "/images/avatar-placeholder.svg"; + +export const AvatarUploader = (props) => { + const { user } = useUser(); + const [imageUrl, setImageUrl] = useState(AVATAR_PLACEHOLDER); + + useEffect(() => { + setImageUrl(user.avatarUrl ?? AVATAR_PLACEHOLDER); + }, [user]); + + return ( +
    +
    + +
    +
    + + {/* TODO: actual uploading */} +
    +
    + ); +}; diff --git a/packages/dashboard-v2/src/components/AvatarUploader/index.js b/packages/dashboard-v2/src/components/AvatarUploader/index.js new file mode 100644 index 00000000..74358cdc --- /dev/null +++ b/packages/dashboard-v2/src/components/AvatarUploader/index.js @@ -0,0 +1 @@ +export * from "./AvatarUploader"; diff --git a/packages/dashboard-v2/src/components/Button/Button.js b/packages/dashboard-v2/src/components/Button/Button.js new file mode 100644 index 00000000..328d52cd --- /dev/null +++ b/packages/dashboard-v2/src/components/Button/Button.js @@ -0,0 +1,37 @@ +import cn from "classnames"; +import PropTypes from "prop-types"; +import styled from "styled-components"; + +/** + * Primary UI component for user interaction + */ +export const Button = styled.button.attrs(({ disabled, $primary, type }) => ({ + type, + className: cn("px-6 py-2.5 rounded-full font-sans uppercase text-xs tracking-wide transition-[opacity_filter]", { + "bg-primary text-palette-600": $primary, + "bg-white border-2 border-black text-palette-600": !$primary, + "cursor-not-allowed opacity-60": disabled, + "hover:brightness-90": !disabled, + }), +}))``; + +Button.propTypes = { + /** + * Is this the principal call to action on the page? + */ + $primary: PropTypes.bool, + /** + * Prevent interaction on the button + */ + disabled: PropTypes.bool, + /** + * Type of button (button / submit) + */ + type: PropTypes.oneOf(["button", "submit"]), +}; + +Button.defaultProps = { + $primary: false, + disabled: false, + type: "button", +}; diff --git a/packages/website/stories/Button.stories.js b/packages/dashboard-v2/src/components/Button/Button.stories.js similarity index 58% rename from packages/website/stories/Button.stories.js rename to packages/dashboard-v2/src/components/Button/Button.stories.js index 44d09366..74f2ca90 100644 --- a/packages/website/stories/Button.stories.js +++ b/packages/dashboard-v2/src/components/Button/Button.stories.js @@ -1,18 +1,20 @@ -import React from "react"; - import { Button } from "./Button"; +// More on default export: https://storybook.js.org/docs/react/writing-stories/introduction#default-export export default { - title: "Example/Button", + title: "SkynetLibrary/Button", component: Button, + // More on argTypes: https://storybook.js.org/docs/react/api/argtypes argTypes: { backgroundColor: { control: "color" }, }, }; +// More on component templates: https://storybook.js.org/docs/react/writing-stories/introduction#using-args const Template = (args) => + + Copied to clipboard + +
    + ); +}; diff --git a/packages/dashboard-v2/src/components/CurrentPlan/CurrentPlan.js b/packages/dashboard-v2/src/components/CurrentPlan/CurrentPlan.js new file mode 100644 index 00000000..f8a5cf9e --- /dev/null +++ b/packages/dashboard-v2/src/components/CurrentPlan/CurrentPlan.js @@ -0,0 +1,48 @@ +import dayjs from "dayjs"; +import relativeTime from "dayjs/plugin/relativeTime"; + +import { useUser } from "../../contexts/user"; +import useActivePlan from "../../hooks/useActivePlan"; +import { ContainerLoadingIndicator } from "../LoadingIndicator"; + +import LatestPayment from "./LatestPayment"; +import SuggestedPlan from "./SuggestedPlan"; + +dayjs.extend(relativeTime); + +const CurrentPlan = () => { + const { user, error: userError } = useUser(); + const { plans, activePlan, error: plansError } = useActivePlan(user); + + if (!user || !activePlan) { + return ; + } + + if (userError || plansError) { + return ( +
    +

    An error occurred while loading this data.

    +

    We'll retry automatically.

    +
    + ); + } + + return ( +
    +

    {activePlan.name}

    +
    + {activePlan.price === 0 &&

    100GB without paying a dime! 🎉

    } + {activePlan.price !== 0 && + (user.subscriptionCancelAtPeriodEnd ? ( +

    Your subscription expires {dayjs(user.subscribedUntil).fromNow()}

    + ) : ( +

    {dayjs(user.subscribedUntil).fromNow(true)} until the next payment

    + ))} + + +
    +
    + ); +}; + +export default CurrentPlan; diff --git a/packages/dashboard-v2/src/components/CurrentPlan/LatestPayment.js b/packages/dashboard-v2/src/components/CurrentPlan/LatestPayment.js new file mode 100644 index 00000000..8ca2ab9e --- /dev/null +++ b/packages/dashboard-v2/src/components/CurrentPlan/LatestPayment.js @@ -0,0 +1,18 @@ +import dayjs from "dayjs"; + +// TODO: this is not an accurate information, we need this data from the backend +const LatestPayment = ({ user }) => ( +
    +
    + Latest payment + + {dayjs(user.subscribedUntil).subtract(1, "month").format("MM/DD/YYYY")} + +
    +
    + Success +
    +
    +); + +export default LatestPayment; diff --git a/packages/dashboard-v2/src/components/CurrentPlan/SuggestedPlan.js b/packages/dashboard-v2/src/components/CurrentPlan/SuggestedPlan.js new file mode 100644 index 00000000..21aa9b48 --- /dev/null +++ b/packages/dashboard-v2/src/components/CurrentPlan/SuggestedPlan.js @@ -0,0 +1,24 @@ +import { Link } from "gatsby"; +import { useMemo } from "react"; + +import { Button } from "../Button"; + +const SuggestedPlan = ({ plans, activePlan }) => { + const nextPlan = useMemo(() => plans.find(({ tier }) => tier > activePlan.tier), [plans, activePlan]); + + if (!nextPlan) { + return null; + } + + return ( +
    +

    Discover {nextPlan.name}

    +

    {nextPlan.description}

    + +
    + ); +}; + +export default SuggestedPlan; diff --git a/packages/dashboard-v2/src/components/CurrentPlan/index.js b/packages/dashboard-v2/src/components/CurrentPlan/index.js new file mode 100644 index 00000000..20390eab --- /dev/null +++ b/packages/dashboard-v2/src/components/CurrentPlan/index.js @@ -0,0 +1,3 @@ +import CurrentPlan from "./CurrentPlan"; + +export default CurrentPlan; diff --git a/packages/dashboard-v2/src/components/CurrentUsage/CurrentUsage.js b/packages/dashboard-v2/src/components/CurrentUsage/CurrentUsage.js new file mode 100644 index 00000000..081b9cca --- /dev/null +++ b/packages/dashboard-v2/src/components/CurrentUsage/CurrentUsage.js @@ -0,0 +1,110 @@ +import { useEffect, useMemo, useState } from "react"; +import fileSize from "pretty-bytes"; +import { Link } from "gatsby"; +import useSWR from "swr"; + +import { useUser } from "../../contexts/user"; +import useActivePlan from "../../hooks/useActivePlan"; +import { ContainerLoadingIndicator } from "../LoadingIndicator"; + +import { GraphBar } from "./GraphBar"; +import { UsageGraph } from "./UsageGraph"; + +const useUsageData = () => { + const { user } = useUser(); + const { activePlan, error } = useActivePlan(user); + const { data: stats, error: statsError } = useSWR("user/stats"); + + const [loading, setLoading] = useState(true); + const [usage, setUsage] = useState({}); + + const hasError = error || statsError; + const hasData = activePlan && stats; + + useEffect(() => { + if (hasData || hasError) { + setLoading(false); + } + + if (hasData && !hasError) { + setUsage({ + filesUsed: stats?.numUploads, + filesLimit: activePlan?.limits?.maxNumberUploads, + storageUsed: stats?.totalUploadsSize, + storageLimit: activePlan?.limits?.storageLimit, + }); + } + }, [hasData, hasError, stats, activePlan]); + + return { + error: error || statsError, + loading, + usage, + }; +}; + +const size = (bytes) => { + const text = fileSize(bytes ?? 0, { maximumFractionDigits: 0 }); + const [value, unit] = text.split(" "); + + return { + text, + value, + unit, + }; +}; + +const ErrorMessage = () => ( +
    +

    We were not able to fetch the current usage data.

    +

    We'll try again automatically.

    +
    +); + +export default function CurrentUsage() { + const { usage, error, loading } = useUsageData(); + const storageUsage = size(usage.storageUsed); + const storageLimit = size(usage.storageLimit); + const filesUsedLabel = useMemo(() => ({ value: usage.filesUsed, unit: "files" }), [usage.filesUsed]); + + if (loading) { + return ; + } + + if (error) { + return ; + } + + return ( + <> +

    + {storageUsage.text} of {storageLimit.text} +

    +

    + {usage.filesUsed} of {usage.filesLimit} files +

    +
    +
    + Storage + {storageLimit.text} +
    + + + + +
    + Files + + + UPGRADE + {" "} + {usage.filesLimit} + +
    +
    + + ); +} diff --git a/packages/dashboard-v2/src/components/CurrentUsage/GraphBar.js b/packages/dashboard-v2/src/components/CurrentUsage/GraphBar.js new file mode 100644 index 00000000..96421f6e --- /dev/null +++ b/packages/dashboard-v2/src/components/CurrentUsage/GraphBar.js @@ -0,0 +1,35 @@ +import styled from "styled-components"; + +const Bar = styled.div.attrs({ + className: `relative flex justify-end h-4 bg-primary rounded-l rounded-r-lg`, +})` + min-width: 1rem; + width: ${({ $percentage }) => $percentage}%; +`; + +const BarTip = styled.span.attrs({ + className: "relative w-4 h-4 border-2 rounded-full bg-white border-primary", +})``; + +const BarLabel = styled.span.attrs({ + className: "bg-white rounded border-2 border-palette-200 px-3 whitespace-nowrap absolute shadow", +})` + right: max(0%, ${({ $percentage }) => 100 - $percentage}%); + top: -0.5rem; + transform: translateX(50%); +`; + +export const GraphBar = ({ value, limit, label }) => { + const percentage = typeof limit !== "number" || limit === 0 ? 0 : (value / limit) * 100; + + return ( +
    + + + + + {label.value} {label.unit} + +
    + ); +}; diff --git a/packages/dashboard-v2/src/components/CurrentUsage/UsageGraph.js b/packages/dashboard-v2/src/components/CurrentUsage/UsageGraph.js new file mode 100644 index 00000000..3f6f23c2 --- /dev/null +++ b/packages/dashboard-v2/src/components/CurrentUsage/UsageGraph.js @@ -0,0 +1,9 @@ +import styled from "styled-components"; + +export const UsageGraph = styled.div.attrs({ + className: "w-full my-3 grid grid-flow-row grid-rows-2", +})` + height: 146px; + background: url(/images/usage-graph-bg.svg) no-repeat; + background-size: cover; +`; diff --git a/packages/dashboard-v2/src/components/CurrentUsage/index.js b/packages/dashboard-v2/src/components/CurrentUsage/index.js new file mode 100644 index 00000000..802aa4e1 --- /dev/null +++ b/packages/dashboard-v2/src/components/CurrentUsage/index.js @@ -0,0 +1,3 @@ +import CurrentUsage from "./CurrentUsage"; + +export default CurrentUsage; diff --git a/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenu.js b/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenu.js new file mode 100644 index 00000000..86cbad5f --- /dev/null +++ b/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenu.js @@ -0,0 +1,65 @@ +import { useRef, useState } from "react"; +import { useClickAway } from "react-use"; +import PropTypes from "prop-types"; +import styled, { css, keyframes } from "styled-components"; + +import { ChevronDownIcon } from "../Icons"; + +const dropDown = keyframes` + 0% { + transform: scaleY(0); + } + 80% { + transform: scaleY(1.1); + } + 100% { + transform: scaleY(1); + } +`; + +const Container = styled.div.attrs({ className: `relative inline-flex` })``; + +const Trigger = styled.button.attrs({ + className: "flex items-center", +})``; + +const TriggerIcon = styled(ChevronDownIcon).attrs({ + className: "transition-transform text-primary", +})` + transform: ${({ open }) => (open ? "rotateX(180deg)" : "none")}; +`; + +const Flyout = styled.div.attrs(({ open }) => ({ + className: `absolute top-full right-0 p-0 z-10 + border rounded border-palette-100 + bg-white shadow-md shadow-palette-200/50 + ${open ? "visible" : "invisible"}`, +}))` + animation: ${({ open }) => + open + ? 
css` + ${dropDown} 0.1s ease-in-out + ` + : "none"}; +`; + +export const DropdownMenu = ({ title, children }) => { + const [open, setOpen] = useState(false); + const menuRef = useRef(); + + useClickAway(menuRef, () => setOpen(false)); + + return ( + + setOpen((open) => !open)}> + {title} + + {children} + + ); +}; + +DropdownMenu.propTypes = { + title: PropTypes.string.isRequired, + children: PropTypes.node.isRequired, +}; diff --git a/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenu.stories.js b/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenu.stories.js new file mode 100644 index 00000000..09e5a712 --- /dev/null +++ b/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenu.stories.js @@ -0,0 +1,25 @@ +import { Panel } from "../Panel"; +import { DropdownMenu, DropdownMenuLink } from "."; +import { CogIcon, LockClosedIcon } from "../Icons"; + +export default { + title: "SkynetLibrary/DropdownMenu", + component: DropdownMenu, + subcomponents: { + DropdownMenuLink, + }, + decorators: [ + (Story) => ( + + + + ), + ], +}; + +export const NavigationDropdown = () => ( + + + + +); diff --git a/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenuLink.js b/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenuLink.js new file mode 100644 index 00000000..426501c4 --- /dev/null +++ b/packages/dashboard-v2/src/components/DropdownMenu/DropdownMenuLink.js @@ -0,0 +1,22 @@ +import styled from "styled-components"; +import PropTypes from "prop-types"; + +const DropdownLink = styled.a.attrs({ + className: `m-0 border-t border-palette-200/50 h-[60px] + whitespace-nowrap transition-colors + hover:bg-palette-100/50 flex items-center + pr-8 pl-6 py-4 gap-4 first:border-0`, +})``; + +export const DropdownMenuLink = ({ active, icon: Icon, label, ...props }) => ( + + {Icon ? : null} + {label} + +); + +DropdownMenuLink.propTypes = { + label: PropTypes.oneOfType([PropTypes.node, PropTypes.string]).isRequired, + active: PropTypes.bool, + icon: PropTypes.func, +}; diff --git a/packages/dashboard-v2/src/components/DropdownMenu/index.js b/packages/dashboard-v2/src/components/DropdownMenu/index.js new file mode 100644 index 00000000..db833f26 --- /dev/null +++ b/packages/dashboard-v2/src/components/DropdownMenu/index.js @@ -0,0 +1,2 @@ +export * from "./DropdownMenu"; +export * from "./DropdownMenuLink"; diff --git a/packages/dashboard-v2/src/components/FileList/FileList.js b/packages/dashboard-v2/src/components/FileList/FileList.js new file mode 100644 index 00000000..6342b970 --- /dev/null +++ b/packages/dashboard-v2/src/components/FileList/FileList.js @@ -0,0 +1,74 @@ +import * as React from "react"; +import useSWR from "swr"; +import { useMedia } from "react-use"; + +import theme from "../../lib/theme"; + +import { ContainerLoadingIndicator } from "../LoadingIndicator"; +import { Select, SelectOption } from "../Select"; +import { Switch } from "../Switch"; +import { TextInputIcon } from "../TextInputIcon/TextInputIcon"; +import { SearchIcon } from "../Icons"; + +import FileTable from "./FileTable"; +import useFormattedFilesData from "./useFormattedFilesData"; + +const FileList = ({ type }) => { + const isMediumScreenOrLarger = useMedia(`(min-width: ${theme.screens.md})`); + const { data, error } = useSWR(`user/${type}?pageSize=10`); + const items = useFormattedFilesData(data?.items || []); + + const setFilter = (name, value) => console.log("filter", name, "set to", value); + + if (!items.length) { + return ( +
    + {/* TODO: proper error message */} + {!data && !error && } + {!data && error &&

    An error occurred while loading this data.

    } + {data &&

    No {type} found.

    } +
    + ); + } + + return ( +
    +
    + } + onChange={console.log.bind(console)} + /> +
    + setFilter("showSmallFiles", value)} className="mr-8"> + + Show small files + + +
    + File type: + +
    +
    + Sort: + +
    +
    +
    + {/* TODO: mobile view (it's not tabular) */} + {isMediumScreenOrLarger ? : "Mobile view"} +
    + ); +}; + +export default FileList; diff --git a/packages/dashboard-v2/src/components/FileList/FileTable.js b/packages/dashboard-v2/src/components/FileList/FileTable.js new file mode 100644 index 00000000..c2f133d6 --- /dev/null +++ b/packages/dashboard-v2/src/components/FileList/FileTable.js @@ -0,0 +1,111 @@ +import { CogIcon, ShareIcon } from "../Icons"; +import { PopoverMenu } from "../PopoverMenu/PopoverMenu"; +import { Table, TableBody, TableCell, TableHead, TableHeadCell, TableRow } from "../Table"; +import { CopyButton } from "../CopyButton"; + +const buildShareMenu = (item) => { + return [ + { + label: "Facebook", + callback: () => { + console.info("share to Facebook", item); + }, + }, + { + label: "Twitter", + callback: () => { + console.info("share to Twitter", item); + }, + }, + { + label: "Discord", + callback: () => { + console.info("share to Discord", item); + }, + }, + ]; +}; + +const buildOptionsMenu = (item) => { + return [ + { + label: "Preview", + callback: () => { + console.info("preview", item); + }, + }, + { + label: "Download", + callback: () => { + console.info("download", item); + }, + }, + { + label: "Unpin", + callback: () => { + console.info("unpin", item); + }, + }, + { + label: "Report", + callback: () => { + console.info("report", item); + }, + }, + ]; +}; + +export default function FileTable({ items }) { + return ( + + + + Name + Type + + Size + + Uploaded + Skylink + Activity + + + + {items.map((item) => { + const { id, name, type, size, date, skylink } = item; + + return ( + + {name} + {type} + + {size} + + {date} + +
    + + {skylink} +
    +
    + +
    + + + + + + +
    +
    +
    + ); + })} +
    +
+ ); +} diff --git a/packages/dashboard-v2/src/components/FileList/index.js b/packages/dashboard-v2/src/components/FileList/index.js new file mode 100644 index 00000000..93296508 --- /dev/null +++ b/packages/dashboard-v2/src/components/FileList/index.js @@ -0,0 +1 @@ +export * from "./FileList"; diff --git a/packages/dashboard-v2/src/components/FileList/useFormattedFilesData.js b/packages/dashboard-v2/src/components/FileList/useFormattedFilesData.js new file mode 100644 index 00000000..82d95090 --- /dev/null +++ b/packages/dashboard-v2/src/components/FileList/useFormattedFilesData.js @@ -0,0 +1,26 @@ +import { useMemo } from "react"; +import prettyBytes from "pretty-bytes"; +import dayjs from "dayjs"; + +const parseFileName = (fileName) => { + const lastDotIndex = Math.max(0, fileName.lastIndexOf(".")) || Infinity; + + return [fileName.substr(0, lastDotIndex), fileName.substr(lastDotIndex)]; +}; + +const formatItem = ({ size, name: rawFileName, uploadedOn, downloadedOn, ...rest }) => { + const [name, type] = parseFileName(rawFileName); + const date = dayjs(uploadedOn || downloadedOn).format("MM/DD/YYYY; HH:mm"); + + return { + ...rest, + date, + size: prettyBytes(size), + type, + name, + }; +}; + +const useFormattedFilesData = (items) => useMemo(() => items.map(formatItem), [items]); + +export default useFormattedFilesData; diff --git a/packages/dashboard-v2/src/components/Footer/Footer.js b/packages/dashboard-v2/src/components/Footer/Footer.js new file mode 100644 index 00000000..501d502d --- /dev/null +++ b/packages/dashboard-v2/src/components/Footer/Footer.js @@ -0,0 +1,8 @@ +import * as React from "react"; +import { PageContainer } from "../PageContainer"; + +export const Footer = () => ( + +

    © Skynet Labs Inc. All rights reserved.

    +
    +); diff --git a/packages/dashboard-v2/src/components/Footer/index.js b/packages/dashboard-v2/src/components/Footer/index.js new file mode 100644 index 00000000..5fca53f7 --- /dev/null +++ b/packages/dashboard-v2/src/components/Footer/index.js @@ -0,0 +1 @@ +export * from "./Footer"; diff --git a/packages/dashboard-v2/src/components/Form/TextField.js b/packages/dashboard-v2/src/components/Form/TextField.js new file mode 100644 index 00000000..6ae35021 --- /dev/null +++ b/packages/dashboard-v2/src/components/Form/TextField.js @@ -0,0 +1,56 @@ +import PropTypes from "prop-types"; +import cn from "classnames"; +import { Field } from "formik"; + +export const TextField = ({ id, label, name, error, touched, className, ...props }) => { + return ( +
    + {label && ( + + )} + + {touched && error && ( +
    + {error} +
    + )} +
    + ); +}; + +/** Besides noted properties, it accepts all props accepted by: + * - a regular element + * - Formik's component + */ +TextField.propTypes = { + /** + * ID for the field. Used to couple