diff --git a/.gitattributes b/.gitattributes
deleted file mode 100644
index 00a51af..0000000
--- a/.gitattributes
+++ /dev/null
@@ -1,6 +0,0 @@
-#
-# https://help.github.com/articles/dealing-with-line-endings/
-#
-# These are explicitly windows files and should use crlf
-*.bat text eol=crlf
-
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
new file mode 100644
index 0000000..c6d896c
--- /dev/null
+++ b/.github/workflows/build.yaml
@@ -0,0 +1,21 @@
+name: Build and Deploy
+on: [push]
+permissions:
+ contents: write
+jobs:
+ build-and-deploy:
+ concurrency: ci-${{ github.ref }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout 🛎️
+ uses: actions/checkout@v3
+
+ - name: Install and Build 🔧
+ run: |
+ cd website && yarn install && yarn run build
+ cp -R build/ ../public/
+
+ - name: Deploy 🚀
+ uses: JamesIves/github-pages-deploy-action@v4
+ with:
+ folder: public
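For reference, the two build steps in this workflow can be reproduced locally before pushing. This is a minimal sketch that mirrors the `run` block above, assuming the same layout (the Docusaurus site under `website/` and the deploy folder at `public/` in the repository root):

```shell
# Mirror of the "Install and Build" step, starting from the repository root.
cd website && yarn install && yarn run build
# The shell is still inside website/ here, so build/ is copied up to ../public/,
# which is the folder the deploy action publishes.
cp -R build/ ../public/
```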
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
deleted file mode 100644
index 958930c..0000000
--- a/.github/workflows/build.yml
+++ /dev/null
@@ -1,81 +0,0 @@
-name: build
-
-on:
- push:
- branches:
- - main
- pull_request:
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- include:
- - scalaVersion: 2.11
- sparkVersion: 2.3.2
- scalaCompt: 2.11.12
- - scalaVersion: 2.11
- sparkVersion: 2.4.8
- scalaCompt: 2.11.12
- - scalaVersion: 2.12
- sparkVersion: 2.4.8
- scalaCompt: 2.12.15
- - scalaVersion: 2.12
- sparkVersion: 3.0.3
- scalaCompt: 2.12.15
- - scalaVersion: 2.12
- sparkVersion: 3.1.2
- scalaCompt: 2.12.15
- - scalaVersion: 2.12
- sparkVersion: 3.2.0
- scalaCompt: 2.12.15
- - scalaVersion: 2.13
- sparkVersion: 3.2.0
- scalaCompt: 2.13.7
- - scalaVersion: 2.12
- sparkVersion: 3.3.0
- scalaCompt: 2.12.15
- - scalaVersion: 2.13
- sparkVersion: 3.3.0
- scalaCompt: 2.13.7
- - scalaVersion: 2.13
- sparkVersion: 3.4.0
- scalaCompt: 2.13.10
- - scalaVersion: 2.13
- sparkVersion: 3.5.0
- scalaCompt: 2.13.12
- steps:
- - uses: actions/checkout@v2
-
- - name: Set up JDK 8
- uses: actions/setup-java@v2
- with:
- java-version: '8'
- distribution: 'adopt'
-
- - name: Setup Gradle Dependencies Cache
- uses: actions/cache@v2.1.6
- with:
- path: ~/.gradle/caches
- key: ${{ runner.os }}-gradle-caches--PscalaVersion=${{ matrix.scalaVersion }} -PsparkVersion=${{ matrix.sparkVersion }} -PscalaCompt=${{ matrix.scalaCompt }} -${{ hashFiles('**/*.gradle', '**/*.gradle.kts') }}
-
- - name: Setup Gradle Wrapper Cache
- uses: actions/cache@v2.1.6
- with:
- path: ~/.gradle/wrapper
- key: ${{ runner.os }}-gradle-wrapper-${{ hashFiles('**/gradle/wrapper/gradle-wrapper.properties') }}
-
- - name: Style Check
- run: ./gradlew :spark:scalastyleMainCheck -PscalaVersion=${{ matrix.scalaVersion }} -PsparkVersion=${{ matrix.sparkVersion }} -PscalaCompt=${{ matrix.scalaCompt }}
-
- - name: Test
- run: ./gradlew test -x :flink:test -PscalaVersion=${{ matrix.scalaVersion }} -PsparkVersion=${{ matrix.sparkVersion }} -PscalaCompt=${{ matrix.scalaCompt }}
-# run: ./gradlew test aggregateScoverage -PscalaVersion=${{ matrix.scalaVersion }} -PsparkVersion=${{ matrix.sparkVersion }} -PscalaCompt=${{ matrix.scalaCompt }}
-
-# - uses: codecov/codecov-action@v2
-# with:
-# token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
-# directory: ./build/reports/scoverage/
-# verbose: true
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index 803ba57..0000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-name: release
-
-on:
- release:
- types: [ prereleased, released ]
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- include:
- - scalaVersion: 2.11
- sparkVersion: 2.3.2
- scalaCompt: 2.11.12
- - scalaVersion: 2.11
- sparkVersion: 2.4.8
- scalaCompt: 2.11.12
- - scalaVersion: 2.12
- sparkVersion: 2.4.8
- scalaCompt: 2.12.15
- - scalaVersion: 2.12
- sparkVersion: 3.0.3
- scalaCompt: 2.12.15
- - scalaVersion: 2.12
- sparkVersion: 3.1.2
- scalaCompt: 2.12.15
- - scalaVersion: 2.12
- sparkVersion: 3.2.0
- scalaCompt: 2.12.15
- - scalaVersion: 2.13
- sparkVersion: 3.2.0
- scalaCompt: 2.13.7
- - scalaVersion: 2.12
- sparkVersion: 3.3.0
- scalaCompt: 2.12.15
- - scalaVersion: 2.13
- sparkVersion: 3.3.0
- scalaCompt: 2.13.7
- - scalaVersion: 2.13
- sparkVersion: 3.4.0
- scalaCompt: 2.13.10
- - scalaVersion: 2.13
- sparkVersion: 3.5.0
- scalaCompt: 2.13.12
- steps:
- - uses: actions/checkout@v2
-
- - name: setup test container reuse
- run: |
- echo 'testcontainers.reuse.enable=true' >> ~/.testcontainers.properties
-
- - name: Set up JDK 8
- uses: actions/setup-java@v2
- with:
- java-version: '8'
- distribution: 'adopt'
-
- - name: Setup Gradle Dependencies Cache
- uses: actions/cache@v2.1.6
- with:
- path: ~/.gradle/caches
- key: ${{ runner.os }}-gradle-caches--PscalaVersion=${{ matrix.scalaVersion }} -PsparkVersion=${{ matrix.sparkVersion }} -PscalaCompt=${{ matrix.scalaCompt }} -${{ hashFiles('**/*.gradle', '**/*.gradle.kts') }}
-
- - name: Setup Gradle Wrapper Cache
- uses: actions/cache@v2.1.6
- with:
- path: ~/.gradle/wrapper
- key: ${{ runner.os }}-gradle-wrapper-${{ hashFiles('**/gradle/wrapper/gradle-wrapper.properties') }}
-
- - name: Style Check
- run: ./gradlew scalastyleMainCheck -PscalaVersion=${{ matrix.scalaVersion }} -PsparkVersion=${{ matrix.sparkVersion }} -PscalaCompt=${{ matrix.scalaCompt }}
-
- - name: Test
- run: ./gradlew test buildJars -PscalaVersion=${{ matrix.scalaVersion }} -PsparkVersion=${{ matrix.sparkVersion }} -PscalaCompt=${{ matrix.scalaCompt }}
-# run: ./gradlew test buildJars aggregateScoverage -PscalaVersion=${{ matrix.scalaVersion }} -PsparkVersion=${{ matrix.sparkVersion }} -PscalaCompt=${{ matrix.scalaCompt }}
-
-# - uses: codecov/codecov-action@v2
-# with:
-# token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
-# directory: ./build/reports/scoverage/
-# verbose: true
-
- - name: Upload Release Asset
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: gh release upload ${{ github.event.release.tag_name }} ./spark/build/libs/sharp-etl-spark-standalone-${{ matrix.sparkVersion }}_${{ matrix.scalaVersion }}-${{ github.event.release.tag_name }}.jar
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b91b51a..4392b28 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,39 +1,15 @@
-*.ipr
-*.iws
-.gradle
-build
-.idea
-classes
+# Directories #
+/build/
+target/
+
+_site
+.sass-cache
+.jekyll-cache
+.jekyll-metadata
+.idea/
*.iml
.DS_Store
-log/
-out
-.local
-buildSrc
-*-local.yml
-*-local.xml
-gradle/*
-gradle/wrapper/dists
-!gradle/wrapper/
-logs
-deploy
-deploy-uat
-deploy-production
-.tool-versions
-.vscode/
-.classpath
-.project
-.bloop
-.metals
-.settings
-common/bin
-core/bin
-spark/bin
+*.pdf
*.log
-.run
-metastore_db
-*.jar
-~$*.xlsx
-spark-warehouse
-**/bin
-paimon-warehouse
+website/metastore_db
+content
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
deleted file mode 100644
index 34f1437..0000000
--- a/.gitlab-ci.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-image: gradle:jdk8
-
-# DinD service is required for Testcontainers
-services:
- - name: docker:dind
- # explicitly disable tls to avoid docker startup interruption
- command: [ "--tls=false" ]
-
-variables:
- # Instruct Testcontainers to use the daemon of DinD.
- DOCKER_HOST: "tcp://docker:2375"
- # Instruct Docker not to start over TLS.
- DOCKER_TLS_CERTDIR: ""
- # Improve performance with overlayfs.
- DOCKER_DRIVER: overlay2
-
-before_script:
- - export GRADLE_USER_HOME=`pwd`/.gradle
-
-cache:
- key: "$CI_JOB_NAME"
- paths:
- - .gradle/wrapper
- - .gradle/caches
-
-style-check:
- stage: .pre
- script:
- - ./gradlew scalastyleMainCheck
-
-build:
- stage: build
- script:
- - echo 'testcontainers.reuse.enable=true' >> ~/.testcontainers.properties && ./gradlew clean test build $ENV
- parallel:
- matrix:
- - ENV: [
- '-PscalaVersion=2.12 -PsparkVersion=3.1.2 -PscalaCompt=2.12.15',
- '-PscalaVersion=2.12 -PsparkVersion=3.2.0 -PscalaCompt=2.12.15',
- '-PscalaVersion=2.12 -PsparkVersion=2.4.8 -PscalaCompt=2.12.15',
- '-PscalaVersion=2.11 -PsparkVersion=2.4.8 -PscalaCompt=2.11.12',
- '-PscalaVersion=2.11 -PsparkVersion=2.3.2 -PscalaCompt=2.11.12',
- '-PscalaVersion=2.13 -PsparkVersion=3.2.0 -PscalaCompt=2.13.7',
- ]
- artifacts:
- paths:
- - spark/build/libs/*.jar
- expire_in: 1 week
\ No newline at end of file
diff --git a/.java-version b/.java-version
deleted file mode 100644
index 6259340..0000000
--- a/.java-version
+++ /dev/null
@@ -1 +0,0 @@
-1.8
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 0000000..bd5eac5
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,5 @@
+{
+ "recommendations": [
+ "takumii.markdowntable"
+ ]
+}
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 261eeb9..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/README.md b/README.md
index 2806e05..6ffad61 100644
--- a/README.md
+++ b/README.md
@@ -1,100 +1,49 @@
-
Sharp ETL
-
-
-
-
-
-
-
-
-
-
-Sharp ETL is an ETL framework that simplifies writing and executing ETLs by simply writing SQL workflow files.
-The SQL workflow file format combines your favorite SQL dialects with just a little bit of configuration.
-
-## Getting started
-
-### Let's start a sharp ETL system db first
-
-```shell
-docker run --name sharp_etl_db -d -p 3306:3306 -e MYSQL_ROOT_PASSWORD=root -e MYSQL_DATABASE=sharp_etl mysql:5.7
-```
-
-### build from source or download jar from [releases](https://github.com/SharpData/SharpETL/releases)
+# Website
-```shell
-./gradlew buildJars -PscalaVersion=2.12 -PsparkVersion=3.3.0 -PscalaCompt=2.12.15
-```
+This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
-### Take a look at `hello_world.sql`
+### Installation
-```shell
-cat spark/src/main/resources/tasks/hello_world.sql
+```
+$ yarn
```
-you will see the following contents:
-
-```sql
--- workflow=hello_world
--- loadType=incremental
--- logDrivenType=timewindow
+### Local Development
--- step=define variable
--- source=temp
--- target=variables
+```
+$ yarn start
+```
-SELECT 'RESULT' AS `OUTPUT_COL`;
+This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
--- step=print SUCCESS to console
--- source=temp
--- target=console
+### Build
-SELECT 'SUCCESS' AS `${OUTPUT_COL}`;
+```
+$ yarn build
```
-### Run and check the console output
+This command generates static content into the `build` directory, which can be served using any static content hosting service.
-```shell
-spark-submit --master local --class com.github.sharpdata.sharpetl.spark.Entrypoint spark/build/libs/sharp-etl-spark-standalone-3.3.0_2.12-0.1.0.jar single-job --name=hello_world --period=1440 --default-start-time="2022-07-01 00:00:00" --once --local
-```
+### Deployment
-And you will see the output like:
+Using SSH:
```
-== Physical Plan ==
-*(1) Project [SUCCESS AS RESULT#17167]
-+- Scan OneRowRelation[]
-root
- |-- RESULT: string (nullable = false)
-
-+-------+
-|RESULT |
-+-------+
-|SUCCESS|
-+-------+
+$ USE_SSH=true yarn deploy
```
+Not using SSH:
-## Versions and dependencies
-
-The compatible versions of [Spark](http://spark.apache.org/) are as follows:
+```
+$ GIT_USER=<Your GitHub username> yarn deploy
+```
-| Spark | Scala
-| ----- | --------
-| 2.3.x | 2.11
-| 2.4.x | 2.11 / 2.12
-| 3.0.x | 2.12
-| 3.1.x | 2.12
-| 3.2.x | 2.12 / 2.13
-| 3.3.x | 2.12 / 2.13
-| 3.4.x | 2.12 / 2.13
-| 3.5.x | 2.13
+If you are using GitHub Pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
+### Continuous Integration
+Some common defaults for linting/formatting have been set for you. If you integrate your project with an open source Continuous Integration system (e.g. Travis CI, CircleCI), you may check for issues using the following command.
-## License
-[](https://app.fossa.com/projects/git%2Bgithub.com%2FSharpData%2FSharpETL?ref=badge_large)
+```
+$ yarn ci
+```
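As a follow-up to the deployment section above, the generated site can also be previewed locally before pushing to `gh-pages`. This is a hedged sketch assuming the standard Docusaurus 2 scripts are present in `website/package.json`:

```shell
cd website
yarn build   # regenerate static content into build/
yarn serve   # serve build/ locally for a final check before deploying
```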
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index 6e31348..0000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,59 +0,0 @@
-variables:
- GRADLE_USER_HOME: $(Pipeline.Workspace)/.gradle
-
-trigger:
- - main
-
-pool:
- vmImage: ubuntu-latest
-
-pr:
- autoCancel: true
- # PRs into ...
- branches:
- include:
- - main
-
-jobs:
- - job: "Build"
- steps:
- - task: Cache@2
- inputs:
- key: 'gradle | "$(Agent.OS)" | **/build.gradle'
- restoreKeys: |
- gradle | "$(Agent.OS)"
- gradle
- path: $(GRADLE_USER_HOME)
- displayName: Configure gradle caching
-
- - task: JavaToolInstaller@0
- inputs:
- versionSpec: '8'
- jdkArchitectureOption: 'x64'
- jdkSourceOption: 'PreInstalled'
-
- - task: Bash@3
- inputs:
- targetType: 'inline'
- script: |
- ./gradlew scalastyleMainCheck test aggregateScoverage -PscalaVersion=2.11 -PsparkVersion=2.3.2 -PscalaCompt=2.11.12
- displayName: test
-
- - script: |
- curl -Os https://uploader.codecov.io/latest/linux/codecov
- chmod +x codecov
- ./codecov -t ${CODECOV_TOKEN} --file /home/vsts/work/1/s/build/reports/scoverage/cobertura.xml
- displayName: 'codecov'
-
- - task: Bash@3
- inputs:
- targetType: 'inline'
- script: |
- # stop the Gradle daemon to ensure no files are left open (impacting the save cache operation later)
- ./gradlew --stop
- displayName: end
-
-# - task: PublishBuildArtifacts@1
-# inputs:
-# pathToPublish: /home/vsts/work/1/s/spark/build/libs/*.jar
-# artifactName: sharp-etl-spark
diff --git a/build.gradle b/build.gradle
deleted file mode 100644
index f1aef9e..0000000
--- a/build.gradle
+++ /dev/null
@@ -1,96 +0,0 @@
-plugins {
- id "java"
- id "application"
- id "scala"
- id "com.github.alisiikh.scalastyle" version "3.4.1"
- id "com.github.maiflai.scalatest" version "0.32"
- id "maven-publish"
- //id "org.scoverage" version "7.0.0"
-}
-
-group = 'com.github.sharpdata.sharpetl'
-sourceCompatibility = 1.8
-version = '0.2.0'
-
-ext {
- scalaVersion = scalaVersion
- sparkVersion = sparkVersion
- scalaCompt = scalaCompt
-}
-
-subprojects {
- version "0.1.0"
-}
-
-allprojects {
- apply plugin: "java"
- apply plugin: "scala"
- apply plugin: "idea"
- //apply plugin: "org.scoverage"
-
- sourceCompatibility = 1.8
-
- //https://mvnrepository.com/repos
- repositories {
- mavenLocal()
- mavenCentral()
- maven { url 'https://maven.aliyun.com/repository/public' }
- maven { url 'https://jitpack.io' }
- google()
- maven { url "https://oss.sonatype.org/content/repositories/releases/" }
- maven { url "https://repository.cloudera.com/artifactory/cloudera-repos/" }
- }
-
- scala {
- zincVersion = "1.7.1"
- }
-
- /*scoverage {
- if (scalaVersion.startsWith("2.11")) {
- scoverageVersion = "1.4.11"
- } else {
- scoverageVersion = "2.0.8"
- }
- excludedFiles = [".*datasource.*", ".*Config", ".*Exception", ".*Accessor"]
- }*/
-
- dependencies {
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- }
-
- tasks.withType(ScalaCompile) {
- scalaCompileOptions.forkOptions.with {
- memoryMaximumSize = '1g'
- jvmArgs = ['-XX:MaxMetaspaceSize=512m', '-Xss10m', '-Xss512M']
- }
- scalaCompileOptions.additionalParameters = [
- "-language:postfixOps",
- "-deprecation", // Emit warning and location for usages of deprecated APIs.
- "-encoding", "utf-8", // Specify character encoding used by source files.
- "-explaintypes", // Explain type errors in more detail.
- "-feature", // Emit warning and location for usages of features that should be imported explicitly.
- "-language:existentials", // Existential types (besides wildcard types) can be written and inferred
- "-language:experimental.macros", // Allow macro definition (besides implementation and application)
- "-language:higherKinds", // Allow higher-kinded types
- "-language:implicitConversions", // Allow definition of implicit functions called views
- "-unchecked", // Enable additional warnings where generated code depends on assumptions.
- "-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access.
- //"-Xfatal-warnings", // Fail the compilation if there are any warnings.
- "-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver.
- "-Xlint:delayedinit-select", // Selecting member of DelayedInit.
- "-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element.
- "-Xlint:inaccessible", // Warn about inaccessible types in method signatures.
- "-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
- "-Xlint:option-implicit", // Option.apply used implicit view.
- "-Xlint:package-object-classes", // Class or object defined in package object.
- "-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds.
- "-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field.
- "-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component.
- "-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope.
- "-Ywarn-dead-code", // Warn when dead code is identified.
- "-Ywarn-numeric-widen", // Warn when numerics are widened.
- "-Ywarn-unused",
- ]
- }
-}
diff --git a/core/build.gradle b/core/build.gradle
deleted file mode 100644
index 4c9ae29..0000000
--- a/core/build.gradle
+++ /dev/null
@@ -1,123 +0,0 @@
-plugins {
- id "java-library"
- id "scala"
- id "com.github.alisiikh.scalastyle"
- id "com.github.maiflai.scalatest"
-}
-
-group = 'com.github.sharpdata.sharpetl'
-sourceCompatibility = 1.8
-version = '0.2.0'
-
-dependencies {
- implementation "org.scala-lang:scala-compiler:$scalaCompt"
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang:scala-reflect:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- implementation "com.lihaoyi:fastparse_$scalaVersion:3.0.0"
- implementation 'io.github.classgraph:classgraph:4.8.149'
- implementation group: "log4j", name: "log4j", version: "1.2.17"
- if (sparkVersion.startsWith("3")) {
- implementation("org.apache.hadoop:hadoop-common") {
- version {
- require '3.3.1'
- }
- }
- } else {
- implementation("org.apache.hadoop:hadoop-common") {
- version {
- require '[2.7,2.8['
- prefer '2.7.2'
- }
- }
- }
- implementation "joda-time:joda-time:2.9.9"
- implementation "org.mybatis:mybatis:3.5.9"
- implementation 'com.zaxxer:HikariCP:2.6.1'
- implementation('io.github.coolbeevip:flyway-core:9.15.2.2') {
- exclude group: 'com.fasterxml.jackson.dataformat', module: 'jackson-dataformat-toml'
- }
- implementation('io.github.coolbeevip:flyway-mysql:9.15.2.2') {
- exclude group: 'com.fasterxml.jackson.dataformat', module: 'jackson-dataformat-toml'
- }
- implementation group: 'org.apache.poi', name: 'poi', version: '4.1.0'
- implementation group: 'org.apache.poi', name: 'poi-ooxml', version: '4.1.0'
- implementation 'org.apache.commons:commons-lang3:3.10'
- implementation 'info.picocli:picocli:4.6.3'
-
- if (sparkVersion.startsWith("3.5")) {
- implementation "io.circe:circe-yaml_$scalaVersion:0.15.0-RC1"
- implementation "io.circe:circe-generic_$scalaVersion:0.15.0-M1"
- implementation "io.circe:circe-generic-extras_$scalaVersion:0.14.3"
- } else {
- implementation "io.circe:circe-yaml_$scalaVersion:0.11.0-M1"
- implementation "io.circe:circe-generic_$scalaVersion:0.12.0-M3"
- implementation "io.circe:circe-generic-extras_$scalaVersion:0.12.0-M3"
- }
- compileOnly 'org.projectlombok:lombok:1.18.22'
- annotationProcessor 'org.projectlombok:lombok:1.18.22'
- // https://mvnrepository.com/artifact/com.jcraft/jsch
- implementation group: 'com.jcraft', name: 'jsch', version: '0.1.55'
- // https://mvnrepository.com/artifact/javax.mail/mail
- implementation group: 'javax.mail', name: 'mail', version: '1.4.7'
-    // encrypt properties file
- api 'org.jasypt:jasypt:1.9.3'
- implementation 'com.aliyun.oss:aliyun-sdk-oss:3.16.0'
-
-
- if (sparkVersion.startsWith("3.1") || sparkVersion.startsWith("3.0")) {
- implementation("com.fasterxml.jackson.module:jackson-module-scala_$scalaVersion") {
- version {
- strictly '2.10.0'
- }
- }
- } else if (sparkVersion.startsWith("3.2")) {
- implementation("com.fasterxml.jackson.module:jackson-module-scala_$scalaVersion") {
- version {
- strictly '2.12.3'
- }
- }
- } else if (sparkVersion.startsWith("3.3")) {
- implementation("com.fasterxml.jackson.module:jackson-module-scala_$scalaVersion") {
- version {
- strictly '2.13.3'
- }
- }
- } else if (sparkVersion.startsWith("3.4")) {
- implementation("com.fasterxml.jackson.module:jackson-module-scala_$scalaVersion") {
- version {
- strictly '2.14.2'
- }
- }
- } else if (sparkVersion.startsWith("3.5")) {
- implementation("com.fasterxml.jackson.module:jackson-module-scala_$scalaVersion") {
- version {
- strictly '2.15.2'
- }
- }
- } else {
- implementation("com.fasterxml.jackson.module:jackson-module-scala_$scalaVersion") {
- version {
- strictly '2.6.7.1'
- }
- }
- }
-
- testImplementation group: 'org.mockito', name: "mockito-scala_$scalaVersion", version: '1.16.39'
- testImplementation group: "org.scalatest", name: "scalatest_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalatest", name: "scalatest-flatspec_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalactic", name: "scalactic_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.junit.jupiter", name: "junit-jupiter-api", version: "5.6.2"
- testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:5.6.2"
- testRuntimeOnly "com.vladsch.flexmark:flexmark-all:0.62.2"
-}
-
-scalastyle {
- config = file("${rootDir}/scalastyle_config.xml") // path to scalastyle config xml file
- failOnWarning = true
- sourceSets {
- test {
- skip = true
- }
- }
-}
diff --git a/core/src/main/java/org/apache/hadoop/classification/InterfaceAudience.java b/core/src/main/java/org/apache/hadoop/classification/InterfaceAudience.java
deleted file mode 100644
index d71f8b4..0000000
--- a/core/src/main/java/org/apache/hadoop/classification/InterfaceAudience.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.classification;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-
-/**
- * Annotation to inform users of a package, class or method's intended audience.
- * Currently the audience can be {@link Public}, {@link LimitedPrivate} or
- * {@link Private}.
- * All public classes must have InterfaceAudience annotation.
- *
- * Public classes that are not marked with this annotation must be
- * considered by default as {@link Private}.
- *
- * External applications must only use classes that are marked
- * {@link Public}. Avoid using non public classes as these classes
- * could be removed or change in incompatible ways.
- *
- * Hadoop projects must only use classes that are marked
- * {@link LimitedPrivate} or {@link Public}
- *
- * Methods may have a different annotation that it is more restrictive
- * compared to the audience classification of the class. Example: A class
- * might be {@link Public}, but a method may be {@link LimitedPrivate}
- *
- */
-//@InterfaceAudience.Public
-//@InterfaceStability.Evolving
-//Work-around for scala bug: https://github.com/scala/bug/issues/12190
-public class InterfaceAudience {
- /**
- * Intended for use by any project or application.
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Public {};
-
- /**
- * Intended only for the project(s) specified in the annotation.
- * For example, "Common", "HDFS", "MapReduce", "ZooKeeper", "HBase".
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface LimitedPrivate {
- String[] value();
- };
-
- /**
- * Intended for use only within Hadoop itself.
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Private {};
-
- private InterfaceAudience() {} // Audience can't exist on its own
-}
diff --git a/core/src/main/java/org/apache/hadoop/classification/InterfaceStability.java b/core/src/main/java/org/apache/hadoop/classification/InterfaceStability.java
deleted file mode 100644
index d955101..0000000
--- a/core/src/main/java/org/apache/hadoop/classification/InterfaceStability.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.classification;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-
-import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-
-/**
- * Annotation to inform users of how much to rely on a particular package,
- * class or method not changing over time. Currently the stability can be
- * {@link Stable}, {@link Evolving} or {@link Unstable}.
- *
- * All classes that are annotated with {@link Public} or
- * {@link LimitedPrivate} must have InterfaceStability annotation.
- * Classes that are {@link Private} are to be considered unstable unless
- * a different InterfaceStability annotation states otherwise.
- * Incompatible changes must not be made to classes marked as stable.
- *
- */
-//@InterfaceAudience.Public
-//@InterfaceStability.Evolving
-//Work-around for scala bug: https://github.com/scala/bug/issues/12190
-public class InterfaceStability {
- /**
- * Can evolve while retaining compatibility for minor release boundaries.;
- * can break compatibility only at major release (ie. at m.0).
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Stable {};
-
- /**
- * Evolving, but can break compatibility at minor release (i.e. m.x)
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Evolving {};
-
- /**
- * No guarantee is provided as to reliability or stability across any
- * level of release granularity.
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Unstable {};
-}
diff --git a/core/src/main/resources/db/flink/migration/V1__init.sql b/core/src/main/resources/db/flink/migration/V1__init.sql
deleted file mode 100644
index 6dd0171..0000000
--- a/core/src/main/resources/db/flink/migration/V1__init.sql
+++ /dev/null
@@ -1,57 +0,0 @@
-create table if not exists `sharp_etl`.job_log
-(
- job_id string,
- workflow_name string not null,
- `period` int not null,
- job_name string not null,
- data_range_start string,
- data_range_end string,
- job_start_time TIMESTAMP(9),
- job_end_time TIMESTAMP(9),
- status string not null comment 'job status: SUCCESS,FAILURE,RUNNING',
- create_time TIMESTAMP(9) comment 'log create time',
- last_update_time TIMESTAMP(9) comment 'log update time',
- load_type string,
- log_driven_type string,
- file string,
- application_id string,
- project_name string,
- runtime_args string,
- PRIMARY KEY (job_id) NOT ENFORCED
-);
-
-create table if not exists `sharp_etl`.quality_check_log
-(
- id string,
- job_id string not null,
- job_name string not null comment 'job name(workflow_name + period)',
- `column` string not null comment 'issue column name',
- data_check_type string not null,
- ids string not null comment 'issue data primary key, concat by `, `, multiple primary key will be concat by `__`',
- error_type string not null comment 'warn/error',
- warn_count bigint,
- error_count bigint,
- create_time TIMESTAMP(9) comment 'log create time',
- last_update_time TIMESTAMP(9) comment 'log update time',
- PRIMARY KEY (id) NOT ENFORCED
-);
-
-create table if not exists `sharp_etl`.step_log
-(
- job_id string not null,
- step_id string not null,
- status string not null,
- start_time TIMESTAMP(9) not null,
- end_time TIMESTAMP(9),
- duration int not null,
- output string not null,
- source_count bigint,
- target_count bigint,
- success_count bigint comment 'success data count',
- failure_count bigint comment 'failure data count',
- error string,
- source_type string,
- target_type string,
- PRIMARY KEY (job_id, step_id) NOT ENFORCED
-);
-
diff --git a/core/src/main/resources/db/mysql/migration/V1__init.sql b/core/src/main/resources/db/mysql/migration/V1__init.sql
deleted file mode 100644
index b5e5748..0000000
--- a/core/src/main/resources/db/mysql/migration/V1__init.sql
+++ /dev/null
@@ -1,56 +0,0 @@
-create table job_log
-(
- job_id varchar(128) primary key,
- workflow_name varchar(128) charset utf8 not null,
- `period` int not null,
- job_name varchar(128) charset utf8 not null,
- data_range_start varchar(128) charset utf8 null,
- data_range_end varchar(128) charset utf8 null,
- job_start_time datetime null,
- job_end_time datetime null,
- status varchar(32) charset utf8 not null comment 'job status: SUCCESS,FAILURE,RUNNING',
- create_time datetime default CURRENT_TIMESTAMP not null comment 'log create time',
- last_update_time datetime default CURRENT_TIMESTAMP not null comment 'log update time',
- load_type varchar(32) null,
- log_driven_type varchar(32) null,
- file text charset utf8 null,
- application_id varchar(64) charset utf8 null,
- project_name varchar(64) charset utf8 null,
- runtime_args text charset utf8 null
-) charset = utf8;
-
-create table quality_check_log
-(
- id bigint auto_increment
- primary key,
- job_id varchar(128) not null,
- job_name varchar(64) charset utf8 not null comment 'job name(workflow_name + period)',
- `column` varchar(64) charset utf8 not null comment 'issue column name',
- data_check_type varchar(64) charset utf8 not null,
- ids text charset utf8 not null comment 'issue data primary key, concat by `, `, multiple primary key will be concat by `__`',
- error_type varchar(16) charset utf8 not null comment 'warn/error',
- warn_count bigint null,
- error_count bigint null,
- create_time datetime default CURRENT_TIMESTAMP not null comment 'log create time',
- last_update_time datetime default CURRENT_TIMESTAMP not null comment 'log update time'
-) charset = utf8;
-
-create table step_log
-(
- job_id varchar(128) not null,
- step_id varchar(64) not null,
- status varchar(32) not null,
- start_time datetime not null,
- end_time datetime null,
- duration int(11) unsigned not null,
- output text not null,
- source_count bigint null,
- target_count bigint null,
- success_count bigint null comment 'success data count',
- failure_count bigint null comment 'failure data count',
- error text null,
- source_type varchar(32) null,
- target_type varchar(32) null,
- primary key (job_id, step_id)
-) charset = utf8;
-
diff --git a/core/src/main/resources/db/spark/migration/V1__init.sql b/core/src/main/resources/db/spark/migration/V1__init.sql
deleted file mode 100644
index 6db5650..0000000
--- a/core/src/main/resources/db/spark/migration/V1__init.sql
+++ /dev/null
@@ -1,54 +0,0 @@
-create table `sharp_etl`.job_log
-(
- job_id string,
- workflow_name string not null,
- `period` int not null,
- job_name string not null,
- data_range_start string,
- data_range_end string,
- job_start_time timestamp,
- job_end_time timestamp,
- status string not null comment 'job status: SUCCESS,FAILURE,RUNNING',
- create_time timestamp comment 'log create time',
- last_update_time timestamp comment 'log update time',
- load_type string,
- log_driven_type string,
- file string,
- application_id string,
- project_name string,
- runtime_args string
-) using delta;
-
-create table `sharp_etl`.quality_check_log
-(
- id bigint,
- job_id string not null,
- job_name string not null comment 'job name(workflow_name + period)',
- `column` string not null comment 'issue column name',
- data_check_type string not null,
- ids string not null comment 'issue data primary key, concat by `, `, multiple primary key will be concat by `__`',
- error_type varchar(16) not null comment 'warn/error',
- warn_count bigint,
- error_count bigint,
- create_time timestamp comment 'log create time',
- last_update_time timestamp comment 'log update time'
-) using delta;
-
-create table `sharp_etl`.step_log
-(
- job_id string not null,
- step_id string not null,
- status string not null,
- start_time timestamp not null,
- end_time timestamp,
- duration int not null,
- output string not null,
- source_count bigint,
- target_count bigint,
- success_count bigint comment 'success data count',
- failure_count bigint comment 'failure data count',
- error string,
- source_type string,
- target_type string
-) using delta;
-
diff --git a/core/src/main/resources/db/sqlserver/migration/V1__init.sql b/core/src/main/resources/db/sqlserver/migration/V1__init.sql
deleted file mode 100644
index d71d2da..0000000
--- a/core/src/main/resources/db/sqlserver/migration/V1__init.sql
+++ /dev/null
@@ -1,60 +0,0 @@
-create table sharp_etl.job_log
-(
- job_id nvarchar(128) identity primary key,
- workflow_name nvarchar(128) not null,
- "period" int not null,
- job_name nvarchar(128) not null,
- data_range_start nvarchar(128),
- data_range_end nvarchar(128),
- job_start_time datetime,
- job_end_time datetime,
- status nvarchar(32) not null,
- create_time datetime default getdate() not null,
- last_update_time datetime default getdate() not null,
- load_type varchar(32),
- log_driven_type varchar(32),
- "file" nvarchar(512),
- application_id nvarchar(64),
- project_name nvarchar(64),
- runtime_args nvarchar(512)
-)
-go
-
-create table sharp_etl.quality_check_log
-(
- id bigint identity
- constraint PK_QCLog_TransactionID
- primary key,
- job_id nvarchar(128) not null,
- job_name nvarchar(64) not null,
- [column] nvarchar(64) not null,
- data_check_type nvarchar(64) not null,
- ids nvarchar(max) not null,
- error_type nvarchar(16) not null,
- warn_count bigint,
- error_count bigint,
- create_time datetime default getdate() not null,
- last_update_time datetime default getdate() not null
-)
-go
-
-create table sharp_etl.step_log
-(
- job_id nvarchar(128) not null,
- step_id varchar(64) not null,
- status varchar(32) not null,
- start_time datetime not null,
- end_time datetime,
- duration int not null,
- output varchar(max) not null,
- source_count bigint,
- target_count bigint,
- success_count bigint,
- failure_count bigint,
- error nvarchar(max),
- source_type nvarchar(32),
- target_type nvarchar(32),
- constraint PK_StepLog_TransactionID
- primary key (job_id, step_id)
-)
-go
\ No newline at end of file
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/AnnotationScanner.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/AnnotationScanner.scala
deleted file mode 100644
index 50619a8..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/AnnotationScanner.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-package com.github.sharpdata.sharpetl.core.annotation
-
-import com.github.sharpdata.sharpetl.core.datasource.config.{DBDataSourceConfig, DataSourceConf, DataSourceConfig}
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Private
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import io.github.classgraph.ClassGraph
-
-import scala.jdk.CollectionConverters._
-
-@Private
-object AnnotationScanner {
-
- val defaultConfigType: Class[DataSourceConfig] = classOf[DBDataSourceConfig].asInstanceOf[Class[DataSourceConfig]]
-
- lazy val tempConfig = {
- val conf = new DBDataSourceConfig()
- conf.dataSourceType = "temp"
- conf
- }
-
- val configRegister: Map[String, Class[DataSourceConfig]] =
- new ClassGraph()
- .acceptPackages("com.github.sharpdata.sharpetl")
- .enableClassInfo()
- .enableAnnotationInfo()
- .scan()
- .getClassesImplementing(classOf[DataSourceConf])
- .getNames
- .asScala
- .map(clazz => {
- try {
- val `class` = Class.forName(clazz)
- val annotation = `class`.getAnnotation(classOf[configFor])
- if (annotation == null) {
- None
- } else {
- val sourceTypes = annotation.types()
- Some(`class`.asInstanceOf[Class[DataSourceConfig]], sourceTypes)
- }
- } catch {
- case _: Throwable => None
- }
- })
- .filter(_.nonEmpty)
- .map(_.get.swap)
- .flatMap(it => it._1.map {
- `type` => (`type`, it._2)
- })
- .toMap
-
- val sourceRegister: Map[String, Class[Source[_, _]]] = {
- new ClassGraph()
- .acceptPackages("com.github.sharpdata.sharpetl")
- .enableClassInfo()
- .enableAnnotationInfo()
- .scan()
- .getClassesImplementing(classOf[Source[_, _]])
- .getNames
- .asScala
- .map(clazz => {
- try {
- val `class` = Class.forName(clazz)
- val annotation = `class`.getAnnotation(classOf[source])
- if (annotation == null) {
- None
- } else {
- val sourceTypes = annotation.types()
- Some(`class`.asInstanceOf[Class[Source[_, _]]], sourceTypes)
- }
- } catch {
- case _: Throwable => None
- }
- })
- .filter(_.nonEmpty)
- .map(_.get.swap)
- .flatMap(it => it._1.map {
- `type` => (`type`, it._2)
- })
- .toMap
- }
-
- val sinkRegister: Map[String, Class[Sink[_]]] =
- new ClassGraph()
- .acceptPackages("com.github.sharpdata.sharpetl")
- .enableClassInfo()
- .enableAnnotationInfo()
- .scan()
- .getClassesImplementing(classOf[Sink[_]])
- .getNames
- .asScala
- .map(clazz => {
- try {
- val `class` = Class.forName(clazz)
- val annotation = `class`.getAnnotation(classOf[sink])
- if (annotation == null) {
- None
- } else {
- val sourceTypes = annotation.types()
- Some(`class`.asInstanceOf[Class[Sink[_]]], sourceTypes)
- }
- } catch {
- case _: Throwable => None
- }
- })
- .filter(_.nonEmpty)
- .map(_.get.swap)
- .flatMap(it => it._1.map {
- `type` => (`type`, it._2)
- })
- .toMap
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/Annotations.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/Annotations.scala
deleted file mode 100644
index dd186d8..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/Annotations.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package com.github.sharpdata.sharpetl.core.annotation
-
-object Annotations {
- final case class Stable(since: String) extends scala.annotation.StaticAnnotation
-
- final case class Evolving(since: String) extends scala.annotation.StaticAnnotation
-
- final case class Experimental(message: String, since: String) extends scala.annotation.StaticAnnotation
-
- final case class Private() extends scala.annotation.StaticAnnotation
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/configFor.java b/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/configFor.java
deleted file mode 100644
index 93bd713..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/configFor.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.github.sharpdata.sharpetl.core.annotation;
-
-import java.lang.annotation.*;
-
-@Target({ElementType.TYPE})
-@Retention(RetentionPolicy.RUNTIME)
-public @interface configFor {
- String[] types();
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/sink.java b/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/sink.java
deleted file mode 100644
index d00af89..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/sink.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.github.sharpdata.sharpetl.core.annotation;
-
-import java.lang.annotation.*;
-
-@Target({ElementType.TYPE})
-@Retention(RetentionPolicy.RUNTIME)
-public @interface sink {
- String[] types();
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/source.java b/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/source.java
deleted file mode 100644
index 5763f30..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/annotation/source.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.github.sharpdata.sharpetl.core.annotation;
-
-import java.lang.annotation.*;
-
-@Target({ElementType.TYPE})
-@Retention(RetentionPolicy.RUNTIME)
-public @interface source {
- String[] types();
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/LogDrivenInterpreter.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/LogDrivenInterpreter.scala
deleted file mode 100644
index 3173ca4..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/LogDrivenInterpreter.scala
+++ /dev/null
@@ -1,376 +0,0 @@
-package com.github.sharpdata.sharpetl.core.api
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import com.github.sharpdata.sharpetl.core.cli.{CommonCommand, SingleJobCommand}
-import com.github.sharpdata.sharpetl.core.exception.Exception._
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor.jobLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.JobStatus.RUNNING
-import com.github.sharpdata.sharpetl.core.repository.model.{JobLog, JobStatus}
-import com.github.sharpdata.sharpetl.core.syntax.{Workflow, WorkflowStep}
-import com.github.sharpdata.sharpetl.core.util.Constants.IncrementalType
-import com.github.sharpdata.sharpetl.core.util.Constants.IncrementalType.UPSTREAM
-import com.github.sharpdata.sharpetl.core.util.Constants.Job.nullDataTime
-import com.github.sharpdata.sharpetl.core.util.DateUtil.{BigIntToLocalDateTime, LocalDateTimeToBigInt}
-import com.github.sharpdata.sharpetl.core.util.IncIdUtil.NumberStringPadding
-import com.github.sharpdata.sharpetl.core.util.JobLogUtil.JobLogFormatter
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{BigIntConverter, isNullOrEmpty, uuid}
-import com.github.sharpdata.sharpetl.core.util._
-
-import java.math.BigInteger
-import java.time.LocalDateTime
-import java.time.temporal.ChronoUnit
-import scala.annotation.tailrec
-import scala.collection.mutable.ListBuffer
-
-@Stable(since = "1.0.0")
-final case class LogDrivenInterpreter(
- workflow: Workflow,
- workflowInterpreter: WorkflowInterpreter[_],
- jobLogAccessor: JobLogAccessor = jobLogAccessor,
- command: CommonCommand
- ) {
-
- @inline private def workflowName: String = workflow.name
-
- private lazy val period = {
- if (command.period > 0) {
- command.period
- } else {
- Option(workflow).map(_.period).map(Option(_)).flatten.map(_.toInt).getOrElse(command.period)
- }
- }
-
- /**
-   * Main entry point for job execution
- */
- def eval(): WfEvalResult = {
- val logQueue = command match {
- case cmd: CommonCommand if cmd.refresh =>
- logDrivenPlan(Some(cmd.refreshRangeStart), Some(cmd.refreshRangeEnd))
- case _ =>
- if (command.once) {
- logDrivenPlan().headOption.toSeq
- } else if (command.latestOnly) {
- logDrivenPlan().reverse.headOption.toSeq
- } else {
- logDrivenPlan()
- }
- }
- WfEvalResult(workflow, tailrecApply(logQueue, checkRunningAndEval))
- }
-
- /**
-   * Check where the previous run stopped, to decide where this run should start
- */
- // scalastyle:off
- def logDrivenPlan(startTimeStr: Option[String] = None, endTimeStr: Option[String] = None): Seq[JobLog] = {
- val lastJob = if (startTimeStr.isDefined && endTimeStr.isDefined) {
- None
- } else {
- Option(jobLogAccessor.lastSuccessExecuted(workflowName))
- }
- val logDrivenType = {
- if (isNullOrEmpty(command.logDrivenType)) {
- Option(workflow).map(_.logDrivenType).getOrElse(IncrementalType.TIMEWINDOW)
- } else {
- command.logDrivenType
- }
-
- }
- logDrivenType match {
- case _: String if workflow != null && workflow.stopScheduleWhenFail && lastJob.isDefined && lastJob.get.status == JobStatus.FAILURE =>
- ETLLogger.warn("Prev job schedule is failed, and stopScheduleWhenFail is true, so we won't schedule the next run.")
- Seq()
- case IncrementalType.AUTO_INC_ID => autoIncIdBasedQueue(lastJob, logDrivenType, startTimeStr, endTimeStr)
- case IncrementalType.KAFKA_OFFSET => kafkaOffsetBasedQueue(startTimeStr, endTimeStr)
- case IncrementalType.UPSTREAM if !isNullOrEmpty(workflow.upstream) => dependOnUpstreamBasedQueue(lastJob, logDrivenType)
- case _ => timeBasedExecuteQueue(lastJob, logDrivenType, startTimeStr, endTimeStr)
- }
- }
- // scalastyle:on
-
- private def kafkaOffsetBasedQueue(startTimeStr: Option[String] = None, endTimeStr: Option[String] = None) = {
- type OffsetRange = Map[String, Map[String, Int]]
- val mapper = new ObjectMapper()
- mapper.registerModule(DefaultScalaModule)
- val jobLogs = jobLogAccessor.executionsLastYear(workflowName)
-
- val (dataRangeStart, jobScheduleId) =
- if (startTimeStr.isDefined) {
- //refresh or custom offset
- (startTimeStr.get, s"$workflowName-${startTimeStr.get}")
- } else if (jobLogs.isEmpty) {
- // not exist => from offset 0
- ("earliest", s"$workflowName-earliest")
- } else {
- // exist => OffsetRange => ordered => latest => options
- val dataRangeEnds = jobLogs.toList.map(_.dataRangeEnd).filter(it => it != "earliest")
- if (dataRangeEnds.isEmpty) {
- ("earliest", s"$workflowName-earliest")
- } else {
- val offsetStart = dataRangeEnds
- .map(str => {
- mapper.readValue(str, classOf[OffsetRange])
- })
- .map(it => (it.values.map(_.values.sum).sum, it))
- .maxBy(_._1)
- ._2
- val jobScheduleId = s"$workflowName-${
- offsetStart.map {
- case (topic: String, partitionToOffset: Map[String, Int]) =>
- partitionToOffset.map { case (partition, offset) => s"$topic-$partition-$offset" }
- }
- .mkString("-")
- }"
- (mapper.writeValueAsString(offsetStart), jobScheduleId)
- }
- }
-
- Seq(
- new JobLog(
- jobId = uuid, workflowName = workflowName,
- period = period, jobName = jobScheduleId,
- dataRangeStart = dataRangeStart, dataRangeEnd = endTimeStr.getOrElse("latest"), // update `dataRangeEnd` in [[BatchKafkaDataSource.read()]]
- jobStartTime = nullDataTime, jobEndTime = nullDataTime,
- status = RUNNING, createTime = nullDataTime,
- lastUpdateTime = nullDataTime,
- logDrivenType = IncrementalType.KAFKA_OFFSET,
- file = "", applicationId = workflowInterpreter.applicationId(),
- projectName = workflow.getProjectName(),
- loadType = workflow.loadType,
- runtimeArgs = command.commandStr.toString
- )
- )
- }
-
-
- private def autoIncIdBasedQueue(lastJob: Option[JobLog], incrementalType: String,
- startTimeStr: Option[String] = None, endTimeStr: Option[String] = None) = {
- val startFrom = startTimeStr.getOrElse {
- lastJob.map(_.dataRangeEnd)
- .getOrElse(Option(command.defaultStart)
- .getOrElse(if (isNullOrEmpty(workflow.defaultStart)) "0" else workflow.defaultStart)
- .padding()
- )
- }
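- // padding() (IncIdUtil.NumberStringPadding) pads the numeric id to a fixed width so that string comparison follows numeric order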
- val jobScheduleId = s"$workflowName-$startFrom"
- Seq(
- new JobLog(
- jobId = uuid, workflowName = workflowName,
- period = period, jobName = jobScheduleId,
- dataRangeStart = startFrom.padding(), dataRangeEnd = endTimeStr.getOrElse("0").padding(),
- jobStartTime = nullDataTime, jobEndTime = nullDataTime,
- status = RUNNING, createTime = nullDataTime,
- lastUpdateTime = nullDataTime,
- logDrivenType = incrementalType,
- file = "", applicationId = workflowInterpreter.applicationId(),
- projectName = workflow.getProjectName(),
- loadType = workflow.loadType,
- runtimeArgs = command.commandStr.toString
- )
- )
- }
-
- private def dependOnUpstreamBasedQueue(lastJob: Option[JobLog], incrementalType: String) = {
- val upstreamJobName = workflow.upstream
- // 1. Get the upstream_log_id recorded in this layer's job log; by default it comes from data_range_start
- val upstreamLogId = if (lastJob.isDefined) lastJob.get.dataRangeStart.asBigInt else "0".asBigInt
- // 2. Use the upstream job_name from the command and the upstream_log_id above to fetch the unprocessed upstream job logs
- ETLLogger.info(s"Current job depend on upstream job $upstreamJobName")
- val unprocessedJobLogs = jobLogAccessor.getUnprocessedUpstreamJobLog(upstreamJobName, upstreamLogId)
- // 3. Build a JobLog object for each of them
- unprocessedJobLogs.map {
- jobLog => {
- dependOnUpstreamScheduleJob(jobLog.jobId, incrementalType)
- }
- }.toList
- }
-
-
- private def timeBasedExecuteQueue(lastJob: Option[JobLog], logDrivenType: String,
- startTimeStr: Option[String] = None, endTimeStr: Option[String] = None): List[JobLog] = {
- val startTime = startTimeStr.map(str => str.asBigInt.asLocalDateTime()).getOrElse(getStartTime(lastJob))
- val shouldScheduleTimes = ceilingScheduleTimes(startTime, period, endTimeStr)
- if (shouldScheduleTimes == 0) {
- ETLLogger.warn(s"Last job($workflowName)'s data range end is $startTime," +
- s" now is ${LocalDateTime.now()}, there is no plan to schedule next run for job($workflowName)")
- } else if (shouldScheduleTimes > 50) {
- ETLLogger.warn(s"Last job($workflowName)'s data range end is $startTime," +
- s" now is ${LocalDateTime.now()}, there is $shouldScheduleTimes job($workflowName) will to be scheduled.")
- }
- val realLogDrivenType = if (isNullOrEmpty(logDrivenType) ||
- (logDrivenType == UPSTREAM && isNullOrEmpty(workflow.upstream))) {
- ETLLogger.warn(s"logDrivenType was $logDrivenType, will be re-set to ${IncrementalType.TIMEWINDOW}," +
- s" might because upstream job not set, upstream job is ${workflow.upstream}")
- IncrementalType.TIMEWINDOW
- } else {
- logDrivenType
- }
- 1.to(shouldScheduleTimes).map { idx =>
- scheduleJob(startTime, period, idx, realLogDrivenType)
- }.toList
- }
-
- def checkRunningAndEval(jobLog: JobLog): JobLog = {
- val runningJob = jobLogAccessor.isAnotherJobRunning(jobLog.jobName)
- if (runningJob == null) {
- evalWorkflow(jobLog)
- } else {
- if (!command.skipRunning) {
- runningJob.failed()
- jobLogAccessor.update(runningJob)
- evalWorkflow(jobLog)
- } else {
- throw AnotherJobIsRunningException(s"Another job(${runningJob.jobId}) of workflow ${runningJob.jobName} is already running")
- }
- }
- }
-
- private def evalWorkflow(jobLog: JobLog): JobLog = {
- ETLLogger.info(s"Start evaluating workflow : ${jobLog.workflowName}... ...")
- try {
- jobLogAccessor.create(jobLog)
- val start = jobLog.formatDataRangeStart()
- val end = jobLog.formatDataRangeEnd()
- val variables = Variables(
- collection.mutable.Map(
- ("${DATA_RANGE_END}", end),
- ("${DATA_RANGE_START}", start),
- ("${JOB_ID}", jobLog.jobId.toString),
- ("${JOB_NAME}", jobLog.jobName),
- ("${WORKFLOW_NAME}", jobLog.workflowName)
- ) ++ jobLog.defaultTimePartition()
- )
- Option(workflow).foreach(wf =>
- ETLLogger.info(s"[Workflow Header]: \n${wf.headerStr}")
- )
- workflowInterpreter
- .evalSteps(
- dropStep(Option(workflow).map(_.steps).getOrElse(Nil)),
- jobLog,
- variables,
- start,
- end
- )
- jobLog.success()
- } catch {
- case _: NoFileSkipException =>
- ETLLogger.warn("Job won't checkRunningAndEval any files because there are no files to be proceed and `throwExceptionIfEmpty` is false")
- jobLog.success()
- case e: NoFileToContinueException =>
- ETLLogger.warn("Job can not get any files!")
- jobLog.failed()
- throw e
- case e: StepFailedException =>
- ETLLogger.error(s"Job failed at step ${e.step}", e)
- jobLog.failed()
- throw e
- case e: Throwable =>
- ETLLogger.error(s"Unknown error", e)
- jobLog.failed()
- throw e
- } finally {
- jobLog.jobEndTime = LocalDateTime.now()
- jobLogAccessor.update(jobLog)
- }
- jobLog
- }
-
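- // Builds the idx-th JobLog of the plan: data range [startTime + (idx-1)*period, startTime + idx*period), in minutes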
- private def scheduleJob(startTime: LocalDateTime, execPeriod: Int, idx: Int, incrementalType: String): JobLog = {
- val dataRangeStart = startTime.plus((idx - 1) * execPeriod, ChronoUnit.MINUTES).asBigInt().toString
- val dataRangeEnd = startTime.plus(idx * execPeriod, ChronoUnit.MINUTES).asBigInt().toString
- val jobScheduleId = s"$workflowName-$dataRangeStart"
- new JobLog(
- jobId = uuid, workflowName = workflowName,
- period = execPeriod, jobName = jobScheduleId,
- dataRangeStart = dataRangeStart, dataRangeEnd = dataRangeEnd,
- jobStartTime = nullDataTime, jobEndTime = nullDataTime,
- status = RUNNING, createTime = nullDataTime,
- lastUpdateTime = nullDataTime,
- logDrivenType = incrementalType,
- file = "", applicationId = workflowInterpreter.applicationId(),
- projectName = workflow.getProjectName(),
- loadType = workflow.loadType,
- runtimeArgs = command.commandStr.toString
- )
- }
-
- private def dependOnUpstreamScheduleJob(upstreamLogId: String, incrementalType: String): JobLog = {
- val dataRangeStart = upstreamLogId.toString()
- val jobScheduleId = s"$workflowName-$dataRangeStart"
- new JobLog(
- jobId = uuid, workflowName = workflowName,
- period = period, jobName = jobScheduleId,
- dataRangeStart = dataRangeStart, dataRangeEnd = "",
- jobStartTime = nullDataTime, jobEndTime = nullDataTime,
- status = RUNNING, createTime = nullDataTime,
- lastUpdateTime = nullDataTime,
- logDrivenType = incrementalType,
- file = "", applicationId = workflowInterpreter.applicationId(),
- projectName = workflow.getProjectName(),
- loadType = workflow.loadType,
- runtimeArgs = command.commandStr.toString
- )
- }
-
- private def getStartTime(lastJob: Option[JobLog]): LocalDateTime = {
- if (lastJob.isEmpty) {
- Option(command.defaultStart)
- .map(
- new BigInteger(_).asLocalDateTime()
- )
- .getOrElse(
- if (isNullOrEmpty(workflow.defaultStart)) {
- LocalDateTime.parse("2022-01-01T00:00:00")
- } else {
- new BigInteger(workflow.defaultStart).asLocalDateTime()
- }
- )
- } else {
- lastJob.get.dataRangeEnd.asBigInt.asLocalDateTime()
- }
- }
-
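- // Number of whole periods between startTime and the end (now, unless endTimeStr is given);
- // e.g. start 2022-01-01T00:00, period 60 min, end 03:30 the same day => 210 / 60 = 3 runs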
- private def ceilingScheduleTimes(startTime: LocalDateTime, execPeriod: Int, endTimeStr: Option[String] = None): Int = {
- Option(execPeriod)
- .filterNot(_ == 0)
- .map(period => startTime.until(endTimeStr.map(str => str.asBigInt.asLocalDateTime()).getOrElse(LocalDateTime.now()), ChronoUnit.MINUTES).toInt / period)
- .getOrElse(1)
- }
-
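- // Applies f to each element in order; after the first failure the remaining elements are marked Skipped.
- // Sketch of the semantics: for Seq(a, b, c) where f(b) throws e, the result is roughly
- // Seq(Success(f(a)), Failure(b, e), Skipped(c)).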
- def tailrecApply[A](seq: Seq[A], f: A => A): Seq[Try[A]] = {
- @tailrec
- def loop(seq: Seq[A], acc: ListBuffer[Try[A]]): Seq[Try[A]] = {
- seq match {
- case head :: tail =>
- Try(f, head) match {
- case r@Success(_) => loop(tail, acc :+ r)
- case f@Failure(_, _) =>
- acc += f
- tail.foreach(it => acc += Skipped(it))
- acc.toSeq
- }
- case Nil => acc.toSeq
- }
- }
-
- loop(seq, ListBuffer[Try[A]]())
- }
-
- def dropStep(steps: List[WorkflowStep]): List[WorkflowStep] = {
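- // e.g. with 5 steps, fromStep=2 and excludeSteps="4" keeps steps 2, 3 and 5 (illustrative)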
- val fromSteps = command match {
- case cmd: SingleJobCommand if !isNullOrEmpty(cmd.fromStep) =>
- steps.slice(cmd.fromStep.toInt - 1, steps.size)
- case _ => steps
- }
- command match {
- case cmd: SingleJobCommand if !isNullOrEmpty(cmd.excludeSteps) =>
- val exclude = cmd.excludeSteps.split(",").map(_.trim).toSet
- fromSteps.filterNot(it => exclude.contains(it.step))
- case _ => fromSteps
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/Variables.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/Variables.scala
deleted file mode 100644
index 684f39b..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/Variables.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package com.github.sharpdata.sharpetl.core.api
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-
-@Stable(since = "1.0.0")
-final case class Variables(private val value: collection.mutable.Map[String, String]) {
- def put(k: String, v: String): Unit = value.put(k, v)
-
- def filter(function: ((String, String)) => Boolean): Variables = Variables(value.filter(function))
-
- def apply(key: String): String = value(key)
-
- def foreach[U](f: (String, String) => U): Unit = value.foreach {
- case (k, v) => f(k, v)
- }
-
- def contains(key: String): Boolean = {
- value.contains(key)
- }
-
- def getOrElse(key: String, default: () => String): String = {
- value.getOrElse(key, default())
- }
-
- def getOrElse(key: String, default: String): String = {
- value.getOrElse(key, default)
- }
-
- def ++(xs: Map[String, String]): Variables = Variables(value ++ xs) // scalastyle:off
-
- def +=(kv: (String, String)): Variables = {
- value += kv
- this
- }
-}
-
-object Variables {
- def empty: Variables = Variables(collection.mutable.Map())
-}
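-
-// Usage sketch (illustrative):
-//   val vars = Variables.empty += ("${JOB_ID}" -> "42")
-//   vars.getOrElse("${JOB_ID}", "unknown") // returns "42"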
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/WfEvalResult.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/WfEvalResult.scala
deleted file mode 100644
index c63bfc8..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/WfEvalResult.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package com.github.sharpdata.sharpetl.core.api
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Evolving
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.{Formatable, Workflow}
-import com.github.sharpdata.sharpetl.core.util.{Failure, Skipped, Try}
-
-@Evolving("1.0.0")
-final case class WfEvalResult(workflow: Workflow, jobLogs: Seq[Try[JobLog]]) extends Formatable {
-
- override def toString: String = formatString.mkString("\n")
-
- def formatString: Seq[String] = {
- if (jobLogs.nonEmpty) {
- val workflowName = jobLogs.head.get.workflowName
- jobLogs.groupBy(_.getClass.getSimpleName)
- .map { case (status, seq) =>
- status match {
- case "Success" => s"""workflow name: $workflowName SUCCESS x ${seq.size}"""
- case "Failure" =>
- s"""workflow name: $workflowName FAILURE x ${seq.size}, job id: ${seq.head.asInstanceOf[Failure[JobLog]].result.jobId}
- | error: ${seq.head.asInstanceOf[Failure[JobLog]].e.getMessage}""".stripMargin
- case "Skipped" =>
- s"""workflow name: $workflowName SKIPPED x ${seq.size}
- |from data range start ${seq.head.asInstanceOf[Skipped[JobLog]].result.dataRangeStart}""".stripMargin
- }
- }.toSeq
- } else {
- Seq()
- }
- }
-}
-
-object WfEvalResult {
- def throwFirstException(results: Seq[WfEvalResult]): Unit = {
- results
- .flatMap(_.jobLogs)
- .foreach {
- case Failure(_, throwable) => throw throwable
- case _ => ()
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/WorkflowInterpreter.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/WorkflowInterpreter.scala
deleted file mode 100644
index 93d339d..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/WorkflowInterpreter.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-package com.github.sharpdata.sharpetl.core.api
-
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.{BooleanString, DataSourceType, IncrementalType}
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.{Private, Stable}
-import com.github.sharpdata.sharpetl.core.api.WorkflowInterpreter._
-import com.github.sharpdata.sharpetl.core.datasource.config.{DataSourceConfig, FileDataSourceConfig, TransformationDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.exception.Exception._
-import com.github.sharpdata.sharpetl.core.quality.QualityCheck
-import com.github.sharpdata.sharpetl.core.repository.StepLogAccessor.stepLogAccessor
-import com.github.sharpdata.sharpetl.core.util.HDFSUtil.downloadFileToHDFS
-import com.github.sharpdata.sharpetl.core.util.IncIdUtil.NumberStringPadding
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, StringUtil}
-import com.google.common.base.Strings.isNullOrEmpty
-import org.apache.commons.lang3.SerializationUtils
-import org.apache.commons.lang3.reflect.FieldUtils
-
-import java.lang.reflect.Field
-
-@Stable(since = "1.0.0")
-trait WorkflowInterpreter[DataFrame] extends Serializable with QualityCheck[DataFrame] with AutoCloseable { // scalastyle:ignore
-
- def evalSteps(steps: List[WorkflowStep],
- jobLog: JobLog,
- variables: Variables,
- start: String,
- end: String): Unit = {
- try {
- steps
- .foreach(step => evalStep(step, jobLog, variables, start, end))
- } catch {
- case _: EmptyDataException =>
- if (steps.exists(step => step.skipFollowStepWhenEmpty == true.toString)) { // scalastyle:ignore
- ETLLogger.info("Data empty! and skip follow steps!")
- }
- case ex: Exception => throw ex
- }
- }
-
- // scalastyle:off
- def evalStep(step: WorkflowStep, jobLog: JobLog, variables: Variables, start: String, end: String): Unit = {
-
- val stepLog = jobLog.createStepLog(step.step)
- stepLog.setSourceType(step.source.dataSourceType)
- stepLog.setTargetType(step.target.dataSourceType)
- stepLogAccessor.create(stepLog)
-
- def doRead(step: WorkflowStep, variables: Variables): DataFrame = {
- applyConf(step.conf)
- updateVariablesForRefreshAutoIncMode(jobLog, variables)
- prepareSql(jobLog, variables, start, end, step)
- stepLog.info(s"[Step]: \n$step")
- stepLogAccessor.update(stepLog)
- read(jobLog, variables, step)
- }
-
- try {
- val df =
- if (!isNullOrEmpty(step.loopOver)) {
- executeSqlToVariables(s"SELECT * FROM `${step.loopOver}`")
- .map { newVariables =>
- val newStep = SerializationUtils.clone(step)
- doRead(newStep, variables.++(newVariables))
- }
- .reduce((left, right) => union(left, right))
- } else {
- doRead(step, variables)
- }
-
- if (df != null) {
- /**
- * For an [[IncrementalType.AUTO_INC_ID]] job this stores the max id; for a refresh job, end should not be "0".padding()
- */
- if (jobLog.logDrivenType == IncrementalType.AUTO_INC_ID && variables.contains("${upperBound}")) {
- if (jobLog.dataRangeEnd == "0".padding()) {
- jobLog.dataRangeEnd = variables("${upperBound}").padding()
- ETLLogger.info(s"Setting dataRangeEnd to ${jobLog.dataRangeEnd}")
- }
- }
- val passed = {
- if (step.source.options.keys.toSeq.exists(_.contains("qualityCheckRules"))) {
- qualityCheck(
- step,
- jobLog.jobId,
- jobLog.jobName,
- df
- ).passed
- } else {
- df
- }
- }
- executeWrite(jobLog, passed, step, variables)
- }
- stepLog.success()
- } catch {
- case e: NoFileFoundException =>
- stepLog.failed(e)
- if (step.throwExceptionIfEmpty == BooleanString.TRUE) {
- throw NoFileToContinueException(step.step)
- } else {
- throw NoFileSkipException(step.step)
- }
- case e: EmptyDataException =>
- if (step.skipFollowStepWhenEmpty == true.toString) {
- stepLog.failed(e)
- throw e
- }
- stepLog.success()
- case t: Throwable =>
- stepLog.failed(t)
- throw StepFailedException(t, step.step)
- } finally {
- stepLogAccessor.update(stepLog)
- }
- }
-
- // scalastyle:on
-
- def read(jobLog: JobLog,
- variables: Variables,
- step: WorkflowStep): DataFrame = {
- step.getSourceConfig.getDataSourceType match {
- case DataSourceType.HDFS |
- DataSourceType.JSON |
- DataSourceType.EXCEL |
- DataSourceType.CSV |
- DataSourceType.FTP |
- DataSourceType.SCP =>
- val files = listFiles(step)
- jobLog.file = files.map(StringUtil.getFileNameFromPath).mkString(",")
- val df = readFile(step, jobLog, variables, files)
- cleanUpTempFiles(step, files)
- df
- case DataSourceType.SFTP =>
- ETLLogger.info("Be run file list: \n%s".format(jobLog.file))
- downloadFileToHDFS(step, jobLog, variables)
- null.asInstanceOf[DataFrame] // scalastyle:ignore
- case DataSourceType.MOUNT =>
- ETLLogger.info("Be run file list: \n%s".format(jobLog.file))
- val hdfsPaths = downloadFileToHDFS(step, jobLog, variables)
- variables.put("FILE_PATHS", hdfsPaths.mkString(","))
- null.asInstanceOf[DataFrame] // scalastyle:ignore
- case _ =>
- executeRead(step, jobLog, variables)
- }
- }
-
- @deprecated def listFiles(step: WorkflowStep): List[String]
-
- @deprecated def cleanUpTempFiles(step: WorkflowStep,
- files: List[String]): Unit = {
- val sourceConfig = step.getSourceConfig[FileDataSourceConfig]
- if (sourceConfig.deleteSource.toBoolean) {
- files.foreach { filePath =>
- sourceConfig.setFilePath(filePath)
- deleteSource(step)
- }
- }
- }
-
- @deprecated def deleteSource(step: WorkflowStep): Unit
-
- @deprecated def readFile(step: WorkflowStep,
- jobLog: JobLog,
- variables: Variables,
- files: List[String]): DataFrame
-
-
- def executeWrite(jobLog: JobLog,
- df: DataFrame,
- step: WorkflowStep,
- variables: Variables): Unit
-
- def executeRead(step: WorkflowStep,
- jobLog: JobLog,
- variables: Variables): DataFrame
-
- override def close(): Unit = ()
-
- def applyConf(conf: Map[String, String]): Unit = ()
-
- def applicationId(): String
-
- def executeSqlToVariables(sql: String): List[Map[String, String]] = List()
-
- def union(left: DataFrame, right: DataFrame): DataFrame
-}
-
-@Private
-object WorkflowInterpreter {
- def updateVariablesForRefreshAutoIncMode(jobLog: JobLog, variables: Variables): Unit = {
- if (jobLog.logDrivenType == IncrementalType.AUTO_INC_ID
- && variables.contains("${upperBound}")
- && jobLog.dataRangeEnd != "0".padding()
- ) {
- variables.put("${upperBound}", jobLog.dataRangeEnd.trimPadding())
- ETLLogger.info(s"Setting variables $${upperBound} to ${variables("${upperBound}")}")
- }
- }
-
- def replaceVariablesInSql(step: WorkflowStep,
- variables: Variables): Unit = {
- var selectSql = step.getSql
- if (selectSql != null) {
- variables.foreach {
- case (varName, varValue) =>
- selectSql = selectSql.replace(varName, varValue)
- }
- step.setSql(selectSql)
- }
- }
-
- def replaceVariablesInOptionsAndArgs(step: WorkflowStep,
- variables: Variables): Unit = {
- val newSourceOpts = replaceValues(step.getSourceConfig.getOptions.toMap, variables)
- step.getSourceConfig.setOptions(newSourceOpts)
-
- val newTargetOpts = replaceValues(step.getTargetConfig.getOptions.toMap, variables)
- step.getTargetConfig.setOptions(newTargetOpts)
-
- step.source match {
- case conf: TransformationDataSourceConfig =>
- val newSourceArgs = replaceValues(conf.getArgs.toMap, variables)
- conf.setArgs(Map(newSourceArgs.toSeq: _*))
- case _ => ()
- }
-
- step.target match {
- case conf: TransformationDataSourceConfig =>
- val newTargetArgs = replaceValues(conf.getArgs.toMap, variables)
- conf.setArgs(Map(newTargetArgs.toSeq: _*))
- case _ => ()
- }
- }
-
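- // e.g. replaceValues(Map("path" -> "/data/${DATA_RANGE_START}"), vars), where vars maps
- // ${DATA_RANGE_START} to "20220101000000", yields Map("path" -> "/data/20220101000000")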
- def replaceValues(options: Map[String, String],
- variables: Variables): Map[String, String] = {
- options.map {
- case (key, value) =>
- var newValue = value
- variables.foreach {
- case (varName, varValue) =>
- newValue = newValue.replace(varName, varValue)
- }
- key -> newValue
- }
- }
-
- def replaceTemplateVariables(conf: DataSourceConfig, variables: Variables): Unit = {
- val clazz = conf.getClass
- val fields = FieldUtils.getAllFields(clazz).filter(
- it => !it.getName.contains("$init$") && it.getType.getName == "java.lang.String"
- )
- fields.foreach(replaceValue(_, variables, conf))
- }
-
- def replaceValue(field: Field, variables: Variables, conf: DataSourceConfig): Unit = {
- field.setAccessible(true)
- var value = field.get(conf).asInstanceOf[String]
- if (value != null) {
- variables.foreach { case (k, v) => value = value.replace(k, v) }
- field.set(conf, value)
- }
- }
-
- /**
- * Substitute variables into the SQL template.
- */
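- // e.g. a sqlTemplate "SELECT * FROM t WHERE ts >= ${DATA_RANGE_START}" with start "20220101000000"
- // becomes "SELECT * FROM t WHERE ts >= 20220101000000" (illustrative)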
- def prepareSql(jobLog: JobLog, variables: Variables, start: String, end: String, step: WorkflowStep): Unit = {
- if (step.getSqlTemplate != null) {
- step.setSql(
- step
- .getSqlTemplate
- .replace("${JOB_ID}", jobLog.jobId.toString)
- .replace("${DATA_RANGE_START}", start)
- .replace("${DATA_RANGE_END}", end)
- )
- replaceVariablesInSql(step, variables.filter(it => it._1.startsWith("$")))
- }
- replaceVariablesInOptionsAndArgs(step, variables)
- replaceTemplateVariables(step.source, variables)
- replaceTemplateVariables(step.target, variables)
- }
-}
\ No newline at end of file
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/cli/Command.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/cli/Command.scala
deleted file mode 100644
index 22cc838..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/cli/Command.scala
+++ /dev/null
@@ -1,278 +0,0 @@
-package com.github.sharpdata.sharpetl.core.cli
-
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.exception.Exception.FailedToParseExtraParamsException
-import com.github.sharpdata.sharpetl.core.util.Constants.Environment
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import org.apache.commons.io.FileUtils.byteCountToDisplaySize
-import picocli.CommandLine
-import picocli.CommandLine.ArgGroup
-
-import scala.collection.mutable
-
-abstract class CommonCommand extends Runnable {
- @CommandLine.Option(
- names = Array("--local"),
- description = Array("running in standalone mode"),
- required = false
- )
- var local: Boolean = false
-
- @CommandLine.Option(
- names = Array("--release-resource"),
- description = Array("Automatically release resource after job completion"),
- required = false
- )
- var releaseResource: Boolean = true
-
- @CommandLine.Option(
- names = Array("--skip-running"),
- description = Array("Handle the running job when scheduling flashed crash"),
- required = false
- )
- var skipRunning: Boolean = true
-
- @CommandLine.Option(
- names = Array("--default-start", "--default-start-time"),
- description = Array("Default start time(eg, 20210101000000)/incremental id of this job"),
- required = false
- )
- var defaultStart: String = _
-
- @CommandLine.Option(
- names = Array("--log-driven-type"),
- description = Array("log driven type"),
- required = false
- )
- var logDrivenType: String = _
-
- @CommandLine.Option(
- names = Array("--period"),
- description = Array("execute period of the job"),
- required = false
- )
- var period: Int = _
-
- @CommandLine.Option(
- names = Array("--once"),
- description = Array("only run the job once(for testing)"),
- required = false
- )
- var once: Boolean = false
-
- @CommandLine.Option(
- names = Array("--latest-only"),
- description = Array("only run the latest schedule(for full job)"),
- required = false
- )
- var latestOnly: Boolean = false
-
- @CommandLine.Option(
- names = Array("--refresh"),
- description = Array("Refresh data at a specific time"),
- required = false
- )
- var refresh: Boolean = false
-
- @CommandLine.Option(
- names = Array("--refresh-range-start"),
- description = Array("Refresh data range start"),
- required = false
- )
- var refreshRangeStart: String = _
-
- @CommandLine.Option(
- names = Array("--refresh-range-end"),
- description = Array("Refresh data range end"),
- required = false
- )
- var refreshRangeEnd: String = _
-
- @CommandLine.Option(
- names = Array("--from-step"),
- description = Array("(Re-)run from step"),
- required = false
- )
- var fromStep: String = _
-
- @CommandLine.Option(
- names = Array("--exclude-steps"),
- description = Array("exclude steps"),
- required = false
- )
- var excludeSteps: String = _
-
- @CommandLine.Option(
- names = Array("--property"),
- description = Array("specify property file location allowed url type [hdfs, file system, s3, oss, etc]"),
- required = false
- )
- var propertyPath: String = _
-
-
- @CommandLine.Option(
- names = Array("--env"),
- description = Array("env: local/test/dev/qa/prod"),
- required = false
- )
- var env: String = Environment.LOCAL
-
- @CommandLine.Option(
- names = Array("--override"),
- description = Array("--override=prop-a=zoo,prop-b=bar"),
- required = false
- )
- var `override`: String = _
-
- var extraParams: mutable.Map[String, String] = mutable.Map[String, String]()
-
- val commandStr: mutable.StringBuilder = new mutable.StringBuilder()
-
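- // e.g. --override=prop-a=zoo,prop-b=bar fills extraParams with Map("prop-a" -> "zoo", "prop-b" -> "bar")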
- protected def parseExtraOptions(): Unit = {
- try {
- if (!isNullOrEmpty(`override`)) {
- commandStr.append(s"--override=")
- `override`.split(",").map(it => it.split("=")).map(it => it(0) -> it.tail.mkString("=")).toMap
- .foreach { case (key, value) =>
- commandStr.append(s"$key=$value,")
- extraParams += (key -> value)
- }
- }
- } catch {
- case _: Throwable =>
- val message =
- s"""Failed to parse extraParams(parameter after --override) format should be [OPTION=VALUE]
- |current extra options: ${`override`}
- |""".stripMargin
- throw FailedToParseExtraParamsException(message)
- }
- }
-
- // scalastyle:off
- def formatCommand(): Unit = {
- if (!isNullOrEmpty(defaultStart) && defaultStart.contains(":")) {
- // 2021-09-24 00:00:00 => 20210924000000
- defaultStart = defaultStart.replace("-", "").replace(" ", "").replace(":", "")
- }
- commandStr.append(s"--local=$local \t")
- commandStr.append(s"--release-resource=$releaseResource \t")
- if (!isNullOrEmpty(defaultStart)) commandStr.append(s"--default-start=$defaultStart \t")
- if (!isNullOrEmpty(logDrivenType)) commandStr.append(s"--log-driven-type=$logDrivenType \t")
- if (period > 0) commandStr.append(s"--period=$period \t")
- commandStr.append(s"--once=$once \t")
- commandStr.append(s"--latest-only=$latestOnly \t")
- commandStr.append(s"--env=$env \t")
- commandStr.append(s"--skip-running=$skipRunning \t")
- if (refresh) {
- commandStr.append(s"--refresh=$refresh \t")
- commandStr.append(s"--refresh-range-start=$refreshRangeStart \t")
- commandStr.append(s"--refresh-range-end=$refreshRangeEnd \t")
- commandStr.append(s"--from-step=$fromStep \t")
- commandStr.append(s"exclude-steps=$excludeSteps \t")
- }
- parseExtraOptions()
- }
- // scalastyle:on
-
- def loggingJobParameters(): Unit = {
- formatCommand()
- println(
- """ .-. . . . .--. .--. .--- ----- .
- |( | | / \ | )| ) | | |
- | `-. |---| /___\ |--' |--' |--- | |
- | )| | / \ | \ | | | |
- | `-' ' '' `' `' '---' ' '---'
- |""".stripMargin)
- println(
- s"""
- |Java version: ${System.getProperty("java.version")}
- |Scala version: ${util.Properties.versionNumberString}
- |OS: ${System.getProperty("os.name")}
- |OS arch: ${System.getProperty("os.arch")}
- |OS version: ${System.getProperty("os.version")}
- |Available processors (cores): ${Runtime.getRuntime.availableProcessors()}
- |Free memory: ${byteCountToDisplaySize(Runtime.getRuntime.freeMemory())}
- |Maximum memory: ${byteCountToDisplaySize(Runtime.getRuntime.maxMemory())}
- |Total memory available to JVM: ${byteCountToDisplaySize(Runtime.getRuntime.totalMemory())}
- |""".stripMargin)
- ETLLogger.info(s"parameters: ${commandStr.toString()}")
- Environment.CURRENT = env
- ETLLogger.info(s"Job profile: ${Environment.CURRENT}")
- }
-}
-
-abstract class SingleJobCommand extends CommonCommand {
-
- @CommandLine.Option(
- names = Array("--name"),
- description = Array("name of the workflow"),
- required = true
- )
- var wfName: String = _
-
- @CommandLine.Option(
- names = Array("-h", "--help"),
- usageHelp = true,
- description = Array("Sample parameters: --name=your-sql-file-name-without-file-extension --period=1440")
- )
- var helpRequested = false
-
- override def formatCommand(): Unit = {
- commandStr.append(s"--name=$wfName \t")
- commandStr.append(s"--help=$helpRequested \t")
- super.formatCommand()
- }
-}
-
-class ExcelOptions {
- @CommandLine.Option(
- names = Array("-f", "--file"),
- description = Array("config file path"),
- required = true
- )
- var filePath: String = _
-}
-
-class SqlFileOptions {
- @CommandLine.Option(
- names = Array("-n", "--names"),
- description = Array("names of the workflow"),
- required = true,
- split = ","
- )
- var wfNames: Array[String] = _
-}
-
-abstract class BatchJobCommand extends CommonCommand {
-
- @ArgGroup(heading = "run batch job by excel%n")
- var excelOptions: ExcelOptions = _
-
- @ArgGroup(heading = "run batch job by sql file%n", exclusive = false)
- var sqlFileOptions: SqlFileOptions = _
-
- @CommandLine.Option(
- names = Array("--parallelism"),
- description = Array("control batch job parallelism"),
- required = false
- )
- var parallelism: Int = Runtime.getRuntime.availableProcessors()
-
- @CommandLine.Option(
- names = Array("-h", "--help"),
- usageHelp = true,
- description = Array("Sample parameters: -f=/path/to/config.xlsx or -n=job1,job2")
- )
- var helpRequested = false
-
- override def formatCommand(): Unit = {
- if (excelOptions != null) {
- commandStr.append(s"--file=$excelOptions.filePath \t")
- }
- if (sqlFileOptions != null) {
- commandStr.append(s"--names=${sqlFileOptions.wfNames.mkString(",")} \t")
- }
- commandStr.append(s"--parallelism=$parallelism \t")
- super.formatCommand()
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/cli/UtilCommand.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/cli/UtilCommand.scala
deleted file mode 100644
index 28f6aae..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/cli/UtilCommand.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package com.github.sharpdata.sharpetl.core.cli
-
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import org.apache.log4j.Level
-import picocli.CommandLine
-import picocli.CommandLine.Parameters
-
-
-@CommandLine.Command(name = "encrypt", description = Array("Encrypts the given string using the given key."))
-class EncryptionCommand extends UtilCommand {
-
- @CommandLine.Option(names = Array("-p", "--propertyFilePath"), description = Array("property file path"), required = true)
- var propertyFilePath: String = _
-
- @Parameters(index = "0", description = Array("target content"), defaultValue = "")
- var content: String = _
-
- override def run(): Unit = {
- super.run()
- ETLLogger.info(s"Original content is $content")
- ETLConfig.setPropertyPath(propertyFilePath)
- val encryptor = ETLConfig.encryptor.get
- val encryptedContent = encryptor.encrypt(content)
- ETLLogger.info(s"ENC($encryptedContent)")
- }
-
-}
-
-class UtilCommand extends Runnable {
- @CommandLine.Option(
- names = Array("-h", "--help"), usageHelp = true, description = Array("Display this help message")
- )
- var helpRequested = false
-
- override def run(): Unit = {
- if (helpRequested) CommandLine.usage(this, System.out)
- import org.apache.log4j.Logger
- Logger.getRootLogger.setLevel(Level.OFF)
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/Sink.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/Sink.scala
deleted file mode 100644
index dd512cb..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/Sink.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-
-// scalastyle:off
-@Stable(since = "1.0.0")
-trait Sink[DataFrame] extends Serializable {
- def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit
-}
-// scalastyle:on
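-
-// A minimal, hypothetical sink, for illustration only (not part of this codebase):
-// class ConsoleSink extends Sink[String] {
-//   override def write(df: String, step: WorkflowStep, variables: Variables): Unit = println(df)
-// }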
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/Source.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/Source.scala
deleted file mode 100644
index 58fa9b4..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/Source.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-
-// scalastyle:off
-@Stable(since = "1.0.0")
-trait Source[DataFrame, Context] extends Serializable {
- def read(step: WorkflowStep, jobLog: JobLog, executionContext: Context, variables: Variables): DataFrame
-}
-// scalastyle:on
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/BigQueryDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/BigQueryDataSourceConfig.scala
deleted file mode 100644
index 2176ae7..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/BigQueryDataSourceConfig.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("bigquery"))
-class BigQueryDataSourceConfig extends DataSourceConfig with Serializable {
-
- @BeanProperty
- var system: String = ""
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/CSVDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/CSVDataSourceConfig.scala
deleted file mode 100644
index d2bf0b2..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/CSVDataSourceConfig.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-import com.github.sharpdata.sharpetl.core.util.Constants.{BooleanString, Encoding, Separator}
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("csv"))
-class CSVDataSourceConfig extends FileDataSourceConfig with Serializable {
-
- @BeanProperty
- var inferSchema: String = BooleanString.TRUE
-
- @BeanProperty
- var encoding: String = Encoding.UTF8
-
- @BeanProperty
- var sep: String = Separator.COMMA
-
- @BeanProperty
- var header: String = BooleanString.TRUE
-
- @BeanProperty
- var quote: String = "\""
-
- @BeanProperty
- var escape: String = "\""
-
- @BeanProperty
- var multiLine: String = BooleanString.FALSE
-
- @BeanProperty
- var ignoreTrailingWhiteSpace: String = BooleanString.FALSE
-
- @BeanProperty
- var selectExpr: String = "*"
-
- @BeanProperty
- var parseTimeFromFileNameRegex: String = ""
-
- @BeanProperty
- var parseTimeFormatPattern: String = ""
-
- @BeanProperty
- var parseTimeColumnName: String = "parsedTime"
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/ClassDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/ClassDataSourceConfig.scala
deleted file mode 100644
index 763086d..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/ClassDataSourceConfig.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("class", "object"))
-class ClassDataSourceConfig extends DataSourceConfig {
-
- /**
- * Class path of the Scala class, e.g. [[com.github.sharpdata.sharpetl.spark.udf.PmmlUDF]]
- */
- @BeanProperty
- var className: String = _
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/CompressTarConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/CompressTarConfig.scala
deleted file mode 100644
index 5ffae75..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/CompressTarConfig.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-import com.github.sharpdata.sharpetl.core.util.Constants.Encoding
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("compresstar"))
-class CompressTarConfig extends FileDataSourceConfig with Serializable {
-
- @BeanProperty
- var encoding: String = Encoding.UTF8
-
- @BeanProperty
- var targetPath: String = ""
-
- @BeanProperty
- var tarPath: String = ""
-
- @BeanProperty
- var bakPath: String = ""
-
- @BeanProperty
- var tmpPath: String = ""
-
- @BeanProperty
- var isPassEmptyFile: String = false.toString
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DBDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DBDataSourceConfig.scala
deleted file mode 100644
index 7a568cc..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DBDataSourceConfig.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.{JdbcDefaultOptions, StringUtil}
-
-import scala.beans.BeanProperty
-
-class DBDataSourceConfig extends DataSourceConfig {
- @BeanProperty
- var connectionName: String = _
-
- @BeanProperty
- var dbName: String = _
-
- @BeanProperty
- var tableName: String = _
-
- @BeanProperty
- var batchSize: String = 1024.toString
-
- // The maximum number of partitions that can be used for parallelism in table reading and writing.
- @BeanProperty
- var numPartitions: String = _
-
- // Together with lowerBound and upperBound, describes how to partition the table when reading in parallel from multiple workers.
- @BeanProperty
- var partitionColumn: String = _
-
- // lowerBound is just used to decide the partition stride, not for filtering the rows in the table.
- @BeanProperty
- var lowerBound: String = _
-
- // upperBound is just used to decide the partition stride, not for filtering the rows in the table.
- @BeanProperty
- var upperBound: String = _
-
- // primary keys, only used by Elasticsearch
- @BeanProperty
- var primaryKeys: String = _
-
- // use transaction or not, false by default
- @BeanProperty
- var transaction: String = false.toString
-
- // scalastyle:off
- override def toString: String = {
- val builder = new StringBuilder()
-
- if (!StringUtil.isNullOrEmpty(connectionName)) builder.append(s"-- connectionName=$connectionName$ENTER")
- if (!StringUtil.isNullOrEmpty(dbName)) builder.append(s"-- dbName=$dbName$ENTER")
- if (!StringUtil.isNullOrEmpty(tableName)) builder.append(s"-- tableName=$tableName$ENTER")
- if (!StringUtil.isNullOrEmpty(batchSize) && batchSize != JdbcDefaultOptions.BATCH_SIZE.toString) builder.append(s"-- batchSize=$batchSize$ENTER")
- if (!StringUtil.isNullOrEmpty(numPartitions)) builder.append(s"-- numPartitions=$numPartitions$ENTER")
- if (!StringUtil.isNullOrEmpty(partitionColumn)) builder.append(s"-- partitionColumn=$partitionColumn$ENTER")
- if (!StringUtil.isNullOrEmpty(lowerBound)) builder.append(s"-- lowerBound=$lowerBound$ENTER")
- if (!StringUtil.isNullOrEmpty(upperBound)) builder.append(s"-- upperBound=$upperBound$ENTER")
- if (!StringUtil.isNullOrEmpty(primaryKeys)) builder.append(s"-- primaryKeys=$primaryKeys$ENTER")
- if (!transaction.equalsIgnoreCase(false.toString)) builder.append(s"-- transaction=$transaction$ENTER")
- builder.append(optionsToString)
- builder.toString
- }
- // scalastyle:on
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DataSourceConfig.scala
deleted file mode 100644
index 9148833..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DataSourceConfig.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import com.github.sharpdata.sharpetl.core.syntax.Formatable
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-
-import scala.beans.BeanProperty
-
-// just for annotation scan
-trait DataSourceConf
-
-@Stable(since = "1.0.0")
-class DataSourceConfig extends Formatable with DataSourceConf {
-
- // For optional values, see [[com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType]]
- @BeanProperty
- var dataSourceType: String = _
-
- @BeanProperty
- var options: Map[String, String] = Map[String, String]()
-
- // value of this field should be like this: day:${DAY};month:${MONTH}
- @BeanProperty
- var derivedColumns: String = _
-
- def optionsToString: String = {
- if (options != null && options.nonEmpty) {
- val builder = new StringBuilder()
- builder.append(s"-- options$ENTER")
- options.foreach { case (key, value) => builder.append(s"-- $key=$value$ENTER") }
- builder.toString()
- } else {
- ""
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DeltaLakeDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DeltaLakeDataSourceConfig.scala
deleted file mode 100644
index 03c7569..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/DeltaLakeDataSourceConfig.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-
-@configFor(types = Array("delta_lake"))
-class DeltaLakeDataSourceConfig extends DBDataSourceConfig with Serializable
-
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/ExcelDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/ExcelDataSourceConfig.scala
deleted file mode 100644
index 6e352cd..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/ExcelDataSourceConfig.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-import com.github.sharpdata.sharpetl.core.util.Constants.BooleanString
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("excel"))
-class ExcelDataSourceConfig extends FileDataSourceConfig with Serializable {
-
- // Whether the sheet has a header row (default true)
- @BeanProperty
- var header: String = BooleanString.TRUE
-
- // Whether to treat empty values as null (library default false; enabled here)
- @BeanProperty
- var treatEmptyValuesAsNulls: String = BooleanString.TRUE
-
- // Enable schema inference (default false)
- @BeanProperty
- var inferSchema: String = BooleanString.FALSE
-
- // Whether to add extra columns for cell colors (default false)
- @BeanProperty
- var addColorColumns: String = BooleanString.FALSE
-
- // Data address, default A1; partial values are allowed (sheet name only, or start cell only)
- // e.g. 'Sheet2'!A1:D3
- //      (sheet name ! start cell : end cell)
- @BeanProperty
- var dataAddress: String = _
-
- // Timestamp format (library default: yyyy-mm-dd hh:mm:ss[.fffffffff])
- @BeanProperty
- var timestampFormat: String = "MM-dd-yyyy HH:mm:ss"
-
- // Set this when reading very large workbooks; a streaming reader will be used
- @BeanProperty
- var maxRowsInMemory: String = _
-
- // Number of rows sampled for schema inference (default 10)
- @BeanProperty
- // scalastyle:off
- var excerptSize: Int = 10
- // scalastyle:on
-
- // Workbook password (default null)
- @BeanProperty
- var workbookPassword: String = _
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/FileDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/FileDataSourceConfig.scala
deleted file mode 100644
index 4883842..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/FileDataSourceConfig.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.util.Constants.BooleanString
-
-import scala.beans.BeanProperty
-
-class FileDataSourceConfig extends DataSourceConfig with Serializable {
-
- // Config prefix for the file system, usually the system's abbreviation
- @BeanProperty
- var configPrefix: String = _
-
- // File directory (overrides ....ftp.dir / ....hdfs.dir in application.properties)
- @BeanProperty
- var fileDir: String = _
-
- // File name regex in three parts (prefix, file name, suffix)
- @BeanProperty
- var fileNamePattern: String = ".*"
-
- // File filter function
- @BeanProperty
- var fileFilterFunc: String = _
-
- // Concrete file path (no need to configure; resolved automatically while the job runs)
- @BeanProperty
- var filePath: String = _
-
- /**
- * File path list, split by ','. If [[FileDataSourceConfig.filePaths]] is defined,
- * [[FileDataSourceConfig.fileNamePattern]] will be ignored.
- */
- @BeanProperty
- var filePaths: String = _
-
- // Whether to delete the source data after loading (default false)
- @BeanProperty
- var deleteSource: String = BooleanString.FALSE
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/HttpDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/HttpDataSourceConfig.scala
deleted file mode 100644
index ae7c9a1..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/HttpDataSourceConfig.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("http"))
-class HttpDataSourceConfig extends DataSourceConfig with Serializable {
-
- @BeanProperty
- var connectionName: String = _
-
- @BeanProperty
- var url: String = _
-
- @BeanProperty
- var httpMethod: String = "GET"
-
- @BeanProperty
- var timeout: String = _
-
- @BeanProperty
- var requestBody: String = _
-
- @BeanProperty
- var fieldName: String = "value"
-
- @BeanProperty
- var jsonPath: String = "$"
-
- @BeanProperty
- var splitBy: String = ""
-}
-
-@configFor(types = Array("http_file"))
-class HttpFileDataSourceConfig extends HttpDataSourceConfig {
-
- @BeanProperty
- var tempDestinationDir = "/tmp"
-
- @BeanProperty
- var hdfsDir = "/tmp"
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/JsonDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/JsonDataSourceConfig.scala
deleted file mode 100644
index d6d1f91..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/JsonDataSourceConfig.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-import com.github.sharpdata.sharpetl.core.util.Constants.BooleanString
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("json"))
-class JsonDataSourceConfig extends FileDataSourceConfig with Serializable {
-
- @BeanProperty
- var multiline: String = BooleanString.FALSE
-
- // Non-strict parsing mode by default
- @BeanProperty
- var mode: String = "PERMISSIVE"
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/PmmlDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/PmmlDataSourceConfig.scala
deleted file mode 100644
index 8b1f82a..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/PmmlDataSourceConfig.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("pmml"))
-class PmmlDataSourceConfig extends ClassDataSourceConfig with Serializable {
-
- // PMML model file name (including extension)
- @BeanProperty
- var pmmlFileName: String = _
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/RemoteFileDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/RemoteFileDataSourceConfig.scala
deleted file mode 100644
index a257d0b..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/RemoteFileDataSourceConfig.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-import com.github.sharpdata.sharpetl.core.util.Constants.BooleanString
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("sftp", "mount"))
-class RemoteFileDataSourceConfig extends FileDataSourceConfig {
- @BeanProperty
- var sourceDir: String = _
-
- @BeanProperty
- var readAll: String = BooleanString.FALSE
-
- @BeanProperty
- var tempDestinationDir: String = _
-
- @BeanProperty
- var tempDestinationDirPermission: String = "rw-rw----"
-
- @BeanProperty
- var hdfsDir: String = _
-
- @BeanProperty
- var filterByTime: String = BooleanString.TRUE
-
- @BeanProperty
- var timeZone: String = "GMT+8"
-
- // Whether to break the following steps when data or files are empty
- @BeanProperty
- var breakFollowStepWhenEmpty: String = BooleanString.TRUE
-
- @BeanProperty
- var dos2unix: String = BooleanString.FALSE
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/StreamingDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/StreamingDataSourceConfig.scala
deleted file mode 100644
index 07c190b..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/StreamingDataSourceConfig.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-import com.github.sharpdata.sharpetl.core.datasource.config.KafkaDataFormat.JSON
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType
-
-import scala.beans.BeanProperty
-
-sealed trait StreamingDataSourceConfig extends DataSourceConfig {
- // Micro-batch interval in seconds
- @BeanProperty
- var interval: String = _
-}
-
-trait KafkaDataSourceConfig extends Serializable {
- // Topics, comma-separated when there is more than one
- @BeanProperty
- var topics: String = _
-
- @BeanProperty
- var groupId: String = _
-
- @BeanProperty
- var format: String = JSON
-
- // Schema DDL of the fields to parse from the JSON value
- @BeanProperty
- var schemaDDL: String = _
-
- // Extra message columns (besides value) to read from the topic
- @BeanProperty
- var topicMessageColumns: String = _
-
-
- // Whether to enable serialization/deserialization
- @BeanProperty
- var enableSerDes: String = "true"
-}
-
-@configFor(types = Array("streaming_kafka"))
-final class StreamingKafkaDataSourceConfig extends StreamingDataSourceConfig with KafkaDataSourceConfig
-
-@configFor(types = Array("batch_kafka"))
-final class BatchKafkaDataSourceConfig extends DataSourceConfig with KafkaDataSourceConfig
-
-object KafkaDataFormat {
- val JSON: String = "JSON"
- val AVRO: String = "AVRO"
-}
-
-
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/TextFileDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/TextFileDataSourceConfig.scala
deleted file mode 100644
index a2aaa61..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/TextFileDataSourceConfig.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-import com.github.sharpdata.sharpetl.core.util.Constants.{BooleanString, Encoding}
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("ftp", "hdfs", "scp"))
-class TextFileDataSourceConfig extends FileDataSourceConfig with Serializable {
-
- // File encoding, default UTF-8
- @BeanProperty
- var encoding: String = Encoding.UTF8
-
- // Compression codec extension; uncompressed by default
- @BeanProperty
- var codecExtension: String = ""
-
- // Whether to decompress before reading
- @BeanProperty
- var decompress: String = BooleanString.FALSE
-
- // Field separator; configure only one of separator and fieldLengthConfig
- @BeanProperty
- var separator: String = _
-
- // Per-field length config; configure only one of separator and fieldLengthConfig
- @BeanProperty
- var fieldLengthConfig: String = _
-
- // Whether the column count must match exactly (default false)
- // e.g. with a 30-column file and a 20-column target, non-strict mode takes the first 20 columns, while strict mode drops rows whose column count differs
- @BeanProperty
- var strictColumnNum: String = BooleanString.FALSE
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/TransformationDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/TransformationDataSourceConfig.scala
deleted file mode 100644
index d9ee88a..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/TransformationDataSourceConfig.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.Constants.TransformerType
-import com.github.sharpdata.sharpetl.core.util.StringUtil
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("transformation"))
-class TransformationDataSourceConfig extends DataSourceConfig with Serializable {
- var className: String = _
- var methodName: String = _
- var transformerType: String = TransformerType.OBJECT_TYPE
-
- /**
- * Args for function call
- */
- @BeanProperty
- var args: Map[String, String] = _
-
- // scalastyle:off
- override def toString: String = {
- val builder = new StringBuilder()
-
- if (!StringUtil.isNullOrEmpty(className)) builder.append(s"-- className=$className$ENTER")
- if (!StringUtil.isNullOrEmpty(methodName)) builder.append(s"-- methodName=$methodName$ENTER")
- if (args != null && args.nonEmpty) {
- args.toList.sortBy(_._1).foreach { case (key, value) => builder.append(s"-- $key=$value$ENTER") }
- }
- if (!StringUtil.isNullOrEmpty(transformerType)) builder.append(s"-- transformerType=$transformerType$ENTER")
- builder.append(optionsToString)
- builder.toString
- }
- // scalastyle:on
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/UDFDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/UDFDataSourceConfig.scala
deleted file mode 100644
index 729415d..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/UDFDataSourceConfig.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-
-import scala.beans.BeanProperty
-
-@configFor(types = Array("udf"))
-class UDFDataSourceConfig extends DataSourceConfig with Serializable {
-
- // Name of the method to register
- @BeanProperty
- var methodName: String = _
-
- // Name of the UDF; once registered it can be invoked by this name in SQL
- @BeanProperty
- var udfName: String = _
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/VariableDataSourceConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/VariableDataSourceConfig.scala
deleted file mode 100644
index 51936b1..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/config/VariableDataSourceConfig.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.config
-
-import com.github.sharpdata.sharpetl.core.annotation.configFor
-
-@configFor(types = Array("variables"))
-class VariableDataSourceConfig extends DataSourceConfig() with Serializable {
- // scalastyle:off
- override def toString: String = {
- this.dataSourceType = "variables"
- val builder = new StringBuilder()
-
- builder.toString
- }
- // scalastyle:on
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/FtpConnection.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/FtpConnection.scala
deleted file mode 100644
index e45798d..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/FtpConnection.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.connection
-
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, StringUtil}
-
-import scala.beans.BeanProperty
-
-class FtpConnection(prefix: String = "") {
-
- private val _prefix = StringUtil.getPrefix(prefix)
-
- @BeanProperty
- var host: String = ETLConfig.getProperty(s"${_prefix}ftp.host")
-
- @BeanProperty
- var port: Int = ETLConfig.getProperty(s"${_prefix}ftp.port").toInt
-
- @BeanProperty
- var user: String = ETLConfig.getProperty(s"${_prefix}ftp.user")
-
- @BeanProperty
- var password: String = ETLConfig.getProperty(s"${_prefix}ftp.password")
-
- @BeanProperty
- var dir: String = ETLConfig.getProperty(s"${_prefix}ftp.dir")
-
- @BeanProperty
- var localTempDir: String = ETLConfig.getProperty(s"${_prefix}ftp.localTempDir", "/tmp/ftp")
-
- @BeanProperty
- var hdfsTempDir: String = ETLConfig.getProperty(s"${_prefix}ftp.hdfsTempDir", "/tmp/ftp")
-
- val url = s"ftp://$user:$password@$host:$port"
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/ScpConnection.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/ScpConnection.scala
deleted file mode 100644
index 7dfa603..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/ScpConnection.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.connection
-
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, StringUtil}
-
-import scala.beans.BeanProperty
-
-class ScpConnection(prefix: String) {
-
- private val _prefix = StringUtil.getPrefix(prefix)
-
- @BeanProperty
- var host: String = ETLConfig.getProperty(s"${_prefix}scp.host")
-
- @BeanProperty
- val port: Int = ETLConfig.getProperty(s"${_prefix}scp.port").toInt
-
- @BeanProperty
- var user: String = ETLConfig.getProperty(s"${_prefix}scp.user")
-
- @BeanProperty
- var password: String = ETLConfig.getProperty(s"${_prefix}scp.password")
-
- @BeanProperty
- var dir: String = ETLConfig.getProperty(s"${_prefix}scp.dir")
-
- @BeanProperty
- var localTempDir: String = ETLConfig.getProperty(s"${_prefix}scp.localTempDir", "/tmp/scp")
-
- @BeanProperty
- var hdfsTempDir: String = ETLConfig.getProperty(s"${_prefix}scp.hdfsTempDir", "/tmp/scp")
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/SftpConnection.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/SftpConnection.scala
deleted file mode 100644
index dc4fb87..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/connection/SftpConnection.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.github.sharpdata.sharpetl.core.datasource.connection
-
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, StringUtil}
-
-class SftpConnection(prefix: String) {
-
- private val _prefix = StringUtil.getPrefix(prefix)
-
- val username: String = ETLConfig.getProperty(s"${_prefix}sftp.username")
- val password: String = ETLConfig.getProperty(s"${_prefix}sftp.password")
- val host: String = ETLConfig.getProperty(s"${_prefix}sftp.host")
- val port: Int = ETLConfig.getProperty(s"${_prefix}sftp.port").toInt
- val proxyHost: String = ETLConfig.getProperty(s"${_prefix}sftp.proxyHost")
- val proxyPort: String = ETLConfig.getProperty(s"${_prefix}sftp.proxyPort")
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/exception/Exception.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/exception/Exception.scala
deleted file mode 100644
index 6347807..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/exception/Exception.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-package com.github.sharpdata.sharpetl.core.exception
-
-import java.io.{PrintWriter, StringWriter}
-
-object Exception {
-
- final case class EmptyDataException(message: String, step: String) extends Exception(message)
-
- final case class IncrementalDiffModeTooMuchDataException(message: String) extends RuntimeException(message)
-
- final case class StepFailedException(throwable: Throwable, step: String) extends Exception(throwable)
-
- final case class AnotherJobIsRunningException(message: String) extends RuntimeException(message)
-
- final case class JobDependenciesError(message: String) extends RuntimeException(message)
-
- final case class IncompleteDataSourceException(message: String) extends Exception(message)
-
- final case class DataQualityCheckRuleMissingException(message: String) extends RuntimeException(message)
-
- final case class BadDataQualityCheckRuleException(message: String) extends RuntimeException(message)
-
- final case class MissingConfigurationException(name: String) extends RuntimeException(s"configuration $name is missing")
-
- final case class UnsupportedStreamingDataSourceException(name: String)
- extends RuntimeException(s"datasource $name is not supported with streaming job")
-
- final case class FileDataSourceConfigErrorException(msg: String) extends RuntimeException(msg)
-
- final case class CanNotLoadPropertyFileException(message: String, e: Throwable) extends RuntimeException(message, e)
-
- final case class FailedToParseExtraParamsException(message: String) extends RuntimeException(message)
-
- final case class SheetNotFoundException(message: String) extends RuntimeException(message)
-
- final case class CellNotFoundException(headerName: String)
- extends RuntimeException(s"header `$headerName` does not exist in current excel sheet")
-
- final case class NoFileFoundException(step: String) extends RuntimeException("No file needs to be executed!")
-
- final case class NoFileToContinueException(step: String) extends RuntimeException("No file found to execute the following step!")
-
- final case class NoFileSkipException(step: String) extends RuntimeException("No file found, skipping the following step!")
-
- final case class InvalidSqlException(msg: String) extends RuntimeException(msg)
-
- final case class DuplicatedSqlScriptException(msg: String) extends RuntimeException(msg)
-
- final case class WorkFlowSyntaxException(msg: String) extends RuntimeException(msg)
-
- final class CheckFailedException(msg: String) extends RuntimeException(msg)
-
- def throwableAsString(t: Throwable): String = {
- val sw = new StringWriter
- t.printStackTrace(new PrintWriter(sw))
- sw.toString
- }
- final class PartitionNotFoundException(msg: String) extends RuntimeException(msg)
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/extension/BuiltInFunctions.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/extension/BuiltInFunctions.scala
deleted file mode 100644
index 1a3a3c5..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/extension/BuiltInFunctions.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.github.sharpdata.sharpetl.core.extension
-
-import javax.annotation.Nullable
-
-
-/**
- * [[com.github.sharpdata.sharpetl.spark.extension.Initializer]]
- */
-object BuiltInFunctions extends Serializable {
- /**
- * Any UDF argument might be null; consider annotating such parameters with [[javax.annotation.Nullable]]
- */
- def powerNullCheck(@Nullable value: String): Boolean = {
- value == null || value.trim.equalsIgnoreCase("null")
- }
-
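- // These helpers are referenced by the SQL generated in QualityCheck (e.g. arrayJoin(top(collect_list(...), n), ','))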
- def flatten(nestedArray: Seq[Seq[String]]): Seq[String] = nestedArray.flatten
-
- def arrayJoin(array: Seq[String], sep: String): String = {
- array.mkString(sep)
- }
-
- def top(array: Seq[String], n: Int): Seq[String] = {
- array.take(n)
- }
-
- def ifEmpty(@Nullable value: String, default: String): String = {
- if (value == null || value.isEmpty) {
- default
- } else {
- value
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/NotificationUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/NotificationUtil.scala
deleted file mode 100644
index 295bdfa..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/NotificationUtil.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-package com.github.sharpdata.sharpetl.core.notification
-
-import com.github.sharpdata.sharpetl.core.api.WfEvalResult
-import com.github.sharpdata.sharpetl.core.notification.sender.{NotificationFactory, NotificationType}
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.{JobLog, JobStatus}
-import com.github.sharpdata.sharpetl.core.util.Constants.Environment
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import com.github.sharpdata.sharpetl.core.notification.sender.email.{Email, Sender}
-import com.github.sharpdata.sharpetl.core.util.DateUtil.{L_YYYY_MM_DD_HH_MM_SS, YYYYMMDDHHMMSS}
-import com.github.sharpdata.sharpetl.core.util.JobLogUtil.JogLogExternal
-import com.google.common.base.Strings.isNullOrEmpty
-
-import java.time.LocalDateTime
-
-class NotificationUtil(val jobLogAccessor: JobLogAccessor) {
-
- lazy val emailSender: String = ETLConfig.getProperty("notification.email.sender")
- lazy val emailSenderPersonalName: String = ETLConfig.getProperty("notification.email.senderPersonalName")
- lazy val summaryJobReceivers: String = ETLConfig.getProperty("notification.email.summaryReceivers")
-
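- // Group job logs by notify config, render one message per job, and send a single summary per recipient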
- def notify(jobResults: Seq[WfEvalResult]): Unit = {
- val configToLogs: Seq[(NotifyConfig, Seq[JobLog])] =
- jobResults
- .filterNot(it => it.workflow.notifies == null || it.workflow.notifies.isEmpty)
- .flatMap(it =>
- it.workflow.notifies
- .flatMap(_.toConfigs())
- .map(n => (n, it.jobLogs.map(_.get)))
- .map { case (config, logs) =>
- (config,
- logs.filter(shouldNotify(config, _)))
- }
- )
-
- configToLogs
- .map(it => (it._1, it._2.map(buildJobMessage)))
- .groupBy(_._1)
- .foreach(it => {
- val messages = it._2
- .flatMap(_._2)
- .map(_.toString)
- .mkString("\n\n")
- ETLLogger.info(s"Notification message:\n $messages")
-
- if (!isNullOrEmpty(messages)) {
- val notification = it._1.notifyType match {
- case NotificationType.EMAIL =>
- new Email(Sender(emailSender, emailSenderPersonalName),
- it._1.recipient, s"[${Environment.CURRENT.toUpperCase}] ETL job summary report", messages)
- case _ => ???
- }
- NotificationFactory.sendNotification(notification)
- }
- })
- }
-
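- // For FAILURE-triggered notifications, only alert on the first failure: skip when the previous run already failed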
- def shouldNotify(notifyConfig: NotifyConfig, jobLog: JobLog): Boolean = {
- if (notifyConfig.triggerCondition != NotifyTriggerCondition.FAILURE) {
- notifyConfig.accept(jobLog)
- } else if (!notifyConfig.accept(jobLog)) {
- false
- } else {
- val previousJobLog = jobLogAccessor.getPreviousJobLog(jobLog)
- previousJobLog == null || previousJobLog.status != JobStatus.FAILURE
- }
- }
-
- def buildJobMessage(jobLog: JobLog): JobMessage = {
-
- JobMessage(
- jobId = jobLog.jobId,
- jobName = jobLog.workflowName,
- jobRangeStart = scala.util.Try(LocalDateTime.parse(jobLog.dataRangeStart, YYYYMMDDHHMMSS)) match {
- case scala.util.Failure(_) => jobLog.dataRangeStart
- case scala.util.Success(value) => value.format(L_YYYY_MM_DD_HH_MM_SS)
- },
- jobRangeEnd = scala.util.Try(LocalDateTime.parse(jobLog.dataRangeEnd, YYYYMMDDHHMMSS)) match {
- case scala.util.Failure(_) => jobLog.dataRangeEnd
- case scala.util.Success(value) => value.format(L_YYYY_MM_DD_HH_MM_SS)
- },
- jobStatus = jobLog.status,
- errorMessage = jobLog.errorMessage(),
- failStep = jobLog.failStep(),
- applicationId = jobLog.applicationId,
- projectName = jobLog.projectName,
- dataFlow = jobLog.dataFlow(),
- duration = jobLog.duration().toString,
- jobStartTime = jobLog.jobStartTime.format(L_YYYY_MM_DD_HH_MM_SS)
- )
- }
-}
-
-final case class JobMessage(jobId: String,
- jobName: String,
- jobRangeStart: String,
- jobRangeEnd: String,
- jobStatus: String,
- errorMessage: String,
- failStep: String,
- applicationId: String,
- projectName: String,
- dataFlow: String,
- duration: String,
- jobStartTime: String) {
-
- override def toString: String = {
- val text =
- s"""
- |projectName: $projectName,
- |jobId: $jobId,
- |workflowName: $jobName,
- |jobRangeStart: $jobRangeStart,
- |jobRangeEnd: $jobRangeEnd,
- |jobStatus: $jobStatus,
- |dataFlow: $dataFlow,
- |applicationId: $applicationId
- |""".stripMargin
- if (jobStatus == JobStatus.FAILURE) {
- text +
- s"""
- |failStep: $failStep
- |errorMessage: $errorMessage""".stripMargin
- } else {
- text
- }
- }
-}
-
-object NotifyTriggerCondition {
- val FAILURE: String = "FAILURE"
- val ALWAYS: String = "ALWAYS"
- val SUCCESS: String = "SUCCESS"
-}
-
-final case class NotifyConfig(notifyType: String,
- recipient: String,
- triggerCondition: String) {
-
- def accept(jobLog: JobLog): Boolean = {
- triggerCondition == NotifyTriggerCondition.ALWAYS || triggerCondition == jobLog.status
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/sender/NotificationFactory.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/sender/NotificationFactory.scala
deleted file mode 100644
index 3328fe9..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/sender/NotificationFactory.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.github.sharpdata.sharpetl.core.notification.sender
-
-import com.github.sharpdata.sharpetl.core.notification.sender.email.{EmailSender, EmailSenderConfiguration}
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-
-object NotificationFactory {
-
- val senders: Map[String, NotificationSender] = initAllSender()
-
- def sendNotification(notification: Notification): Unit = {
- notification.notificationType match {
- case NotificationType.EMAIL => senders(NotificationType.EMAIL).send(notification)
- case _ => ???
- }
- }
-
- def initAllSender(): Map[String, NotificationSender] = {
- val smtpProps = ETLConfig.getProperties("notification.smtp")
- if (smtpProps.nonEmpty) {
- Map(NotificationType.EMAIL ->
- new EmailSender(EmailSenderConfiguration.init(smtpProps)))
- } else {
- Map.empty
- }
- }
-}
-
-
-trait NotificationSender {
- def send(notification: Notification): Unit
-}
-
-abstract class Notification(val message: String, val notificationType: String)
-
-object NotificationType {
- val EMAIL = "email"
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/sender/email/EmailSender.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/sender/email/EmailSender.scala
deleted file mode 100644
index ac499bd..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/notification/sender/email/EmailSender.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-package com.github.sharpdata.sharpetl.core.notification.sender.email
-
-import com.github.sharpdata.sharpetl.core.notification.sender.{Notification, NotificationSender, NotificationType}
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-
-import java.util.{Date, Properties}
-import javax.activation.DataHandler
-import javax.mail.internet.{InternetAddress, MimeBodyPart, MimeMessage, MimeMultipart}
-import javax.mail.util.ByteArrayDataSource
-import javax.mail.{Address, Message, Session, Transport}
-
-// $COVERAGE-OFF$
-class EmailSender(val emailSenderConfiguration: EmailSenderConfiguration) extends NotificationSender {
-
- override def send(notification: Notification): Unit = {
-
- val email = notification.asInstanceOf[Email]
- val props = emailSenderConfiguration.toSessionProperties()
-
- val session = Session.getInstance(props)
- try {
- val message = new MimeMessage(session)
- message.addHeader("Content-Transfer-Encoding", "8bit")
-
- message.setFrom(new InternetAddress(email.sender.address, email.sender.personalName))
- message.setReplyTo(InternetAddress.parse(email.sender.address, false).map(it => it.asInstanceOf[Address]))
-
- val multipart = new MimeMultipart()
-
- val emailBody = new MimeBodyPart()
- emailBody.setText(email.body)
- multipart.addBodyPart(emailBody)
-
- if (email.attachment.isDefined) {
- val attachment = new MimeBodyPart()
- val dataSource = new ByteArrayDataSource(
- email.attachment.get.content.getBytes(),
- email.attachment.get.mimeType)
- attachment.setDataHandler(new DataHandler(dataSource))
- attachment.setFileName(email.attachment.get.fileName)
- multipart.addBodyPart(attachment)
- }
-
- message.setContent(multipart)
- message.setSubject(email.subject)
- message.setSentDate(new Date())
- message.setRecipients(Message.RecipientType.TO, InternetAddress.parse(email.receiver, false).map(it => it.asInstanceOf[Address]))
-
- Transport.send(message)
- } catch {
- case e: Throwable =>
- ETLLogger.error("Failed to send email", e)
- throw e
- }
- }
-}
-
-class Email(val sender: Sender,
- val receiver: String,
- val subject: String,
- val body: String,
- val attachment: Option[EmailAttachment] = Option.empty) extends Notification(body, NotificationType.EMAIL)
-
-
-final case class Sender(address: String, personalName: String)
-
-
-class EmailAttachment(val content: String,
- val mimeType: String,
- val fileName: String)
-
-class EmailSenderConfiguration(
- val host: String,
- val port: Int
- ) {
- def toSessionProperties(): Properties = {
- val props = new Properties()
- props.put("mail.smtp.host", host)
- props.put("mail.smtp.port", port.toString)
- props
- }
-}
-
-object EmailSenderConfiguration {
-
- private val defaultPort = 25
- private val defaultHost = "localhost"
-
- def init(props: Map[String, String]): EmailSenderConfiguration = {
- new EmailSenderConfiguration(
- props.getOrElse("host", defaultHost),
- props.get("port").map(s => s.toInt).getOrElse(defaultPort)
- )
- }
-
-
-}
-// $COVERAGE-ON$
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/DataQualityConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/DataQualityConfig.scala
deleted file mode 100644
index 4283f52..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/DataQualityConfig.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.github.sharpdata.sharpetl.core.quality
-
-object ErrorType extends Serializable {
- val warn = "warn"
- val error = "error"
-}
-
-final case class DataQualityConfig(column: String, dataCheckType: String, rule: String, errorType: String)
-
-final case class DataQualityCheckResult(column: String, dataCheckType: String, ids: String, errorType: String, warnCount: Long, errorCount: Long)
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheck.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheck.scala
deleted file mode 100644
index 8c58fa1..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheck.scala
+++ /dev/null
@@ -1,348 +0,0 @@
-package com.github.sharpdata.sharpetl.core.quality
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import com.github.sharpdata.sharpetl.core.exception.Exception.DataQualityCheckRuleMissingException
-import com.github.sharpdata.sharpetl.core.quality.QualityCheck._
-import com.github.sharpdata.sharpetl.core.repository.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.ReflectUtil.reflectObjectMethod
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, StringUtil}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.uuidName
-import com.google.common.base.Strings.isNullOrEmpty
-
-// scalastyle:off
-case class CheckResult[DataFrame](warn: Seq[DataQualityCheckResult], error: Seq[DataQualityCheckResult], passed: DataFrame)
-
-final case class CheckStep[DataFrame](warn: Seq[DataQualityCheckResult], error: Seq[DataQualityCheckResult], sql: String) {
- def union(other: CheckStep[DataFrame], passed: DataFrame): CheckResult[DataFrame] = {
- CheckResult(warn ++ other.warn, error ++ other.error, passed)
- }
-
- def union(others: Seq[CheckStep[DataFrame]], passed: DataFrame): CheckResult[DataFrame] = {
- val warns = others.map(_.warn).fold(warn)(_ ++ _)
- val errors = others.map(_.error).fold(error)(_ ++ _)
- CheckResult(warns, errors, passed)
- }
-}
-
-
-@Stable(since = "1.0.0")
-trait QualityCheck[DataFrame] extends Serializable {
- val dataQualityCheckRules: Map[String, QualityCheckRule]
- val qualityCheckAccessor: QualityCheckAccessor
-
- def qualityCheck(step: WorkflowStep, jobId: String, jobScheduleId: String,
- df: DataFrame): CheckResult[DataFrame] = {
- val idColumn = step.source.options.getOrElse("idColumn", "id")
- val sortColumn = step.source.options.getOrElse("sortColumn", "")
- val desc = step.source.options.getOrElse("desc", "true")
- val dataQualityConfigs = parseQualityConfig(step)
- val configs = dataQualityConfigs.groupBy(_.rule.startsWith(UserDefinedRule.PREFIX))
-
- val udrConfigs = configs.getOrElse(true, Seq())
- val sqlConfigs = configs.getOrElse(false, Seq())
-
- val tempViewName = uuidName()
- val preWindowedViewName = tempViewName + "_pre_windowed"
- createView(df, preWindowedViewName)
- val distinctDf = dropUnusedCols(execute(windowByPkSql(preWindowedViewName, idColumn, sortColumn, desc.toBoolean)), "__row_num")
- createView(distinctDf, tempViewName)
-
- val topN = step.source.options.getOrElse("topN", DEFAULT_TOP_N.toString).toInt
-
- val checkDuplicateStep = checkDuplicate(preWindowedViewName, idColumn, sortColumn, desc.toBoolean, topN)
-
- val sqlStep: CheckStep[DataFrame] =
- if (sqlConfigs.isEmpty) {
- CheckStep(Seq(), Seq(), StringUtil.EMPTY)
- } else {
- check(tempViewName, sqlConfigs, idColumn, topN)
- }
-
- val (udrStep: CheckStep[DataFrame], views: Seq[String]) =
- if (udrConfigs.isEmpty) {
- (CheckStep(Seq(), Seq(), StringUtil.EMPTY), List())
- } else {
- checkUDR(tempViewName, udrConfigs, idColumn, topN)
- }
-
- val resultView = s"${tempViewName}_result"
- val passed: DataFrame = execute(generateAntiJoinSql(sqlStep.sql, udrStep.sql, tempViewName))
-
- val result = sqlStep.union(Seq(checkDuplicateStep, udrStep), passed)
-
- ETLLogger.warn(s"Found ${result.warn.size} warn(s) in job $jobScheduleId")
- ETLLogger.error(s"Found ${result.error.size} error(s) in job $jobScheduleId")
- recordCheckResult(jobId, jobScheduleId, result.error ++ result.warn ++ checkDuplicateStep.error)
- dropView(tempViewName)
- dropView(resultView)
- dropView(preWindowedViewName)
- views.foreach(dropView)
- result
- }
-
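- // Source options named "column.<name>" map a column to one or more comma-separated rule types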
- def parseQualityConfig(step: WorkflowStep): Seq[DataQualityConfig] =
- step.source.options
- .filter { case (k, _) => k.startsWith("column") }
- .flatMap { case (key, rules) =>
- val columnName = key.split("\\.").toList.tail.head
- rules.split(",").map { ruleType =>
- if (!dataQualityCheckRules.contains(ruleType.trim)) {
- val msg = s"rule type: ${ruleType.trim} is missing from config file quality-check.yaml"
- ETLLogger.error(msg)
- throw DataQualityCheckRuleMissingException(msg)
- }
- dataQualityCheckRules(ruleType.trim).withColumn(columnName)
- }.toSeq
- }
- .toSeq
-
-
- def recordCheckResult(jobId: String, jobScheduleId: String, results: Seq[DataQualityCheckResult]): Unit = {
- results
- .filter(it => it.warnCount > 0 || it.errorCount > 0)
- .map(it =>
- QualityCheckLog(jobId, jobScheduleId, it.column, it.dataCheckType, it.ids, it.errorType, it.warnCount, it.errorCount)
- ).foreach(qualityCheckAccessor.create)
- }
-
- def checkDuplicate(tempViewName: String, idColumn: String,
- sortColumns: String, desc: Boolean, topN: Int = DEFAULT_TOP_N): CheckStep[DataFrame] = {
- // val windowSql = windowByPkSql(tempViewName, idColumn, sortColumns, desc)
- // val distinctDf = dropUnusedCols(execute(windowSql), "__row_num")
- // createView(distinctDf, tempViewName)
-
- val duplicatedViewName = s"${tempViewName}_duplicated"
- val duplicatedDf = execute(windowByPkSqlErrors(tempViewName, idColumn, sortColumns, desc))
- createView(duplicatedDf, duplicatedViewName)
-
- val configs = Seq(DataQualityConfig(idColumn, "Duplicated PK check", "", ErrorType.error))
- val errors: Seq[DataQualityCheckResult] = queryCheckResult(generateErrorUnions(configs, topN, duplicatedViewName))
-
- CheckStep(Seq(), errors, "") // "" because no data will be filtered here
- }
-
- def check(tempViewName: String, dataQualityCheckMapping: Seq[DataQualityConfig],
- idColumn: String, topN: Int = DEFAULT_TOP_N): CheckStep[DataFrame] = {
- val resultView = s"${tempViewName}_result"
- val resultSql = checkSql(tempViewName, resultView, dataQualityCheckMapping, idColumn)
- execute(resultSql)
- val warns: Seq[DataQualityCheckResult] = queryCheckResult(generateWarnUnions(dataQualityCheckMapping, topN, resultView))
- val errors: Seq[DataQualityCheckResult] = queryCheckResult(generateErrorUnions(dataQualityCheckMapping, topN, resultView))
-
- CheckStep(warns, errors, antiJoinSql(idColumn, tempViewName, resultView))
- }
-
- def checkUDR(tempViewName: String, dataQualityCheckMapping: Seq[DataQualityConfig],
- idColumn: String, topN: Int = DEFAULT_TOP_N): (CheckStep[DataFrame], Seq[String]) = {
- val udrWithViews = dataQualityCheckMapping.map { udr =>
- val className = udr.rule.replace(s"${UserDefinedRule.PREFIX}.", "")
- val (sql, viewName) = reflectObjectMethod(className, "check", tempViewName, idColumn, udr).asInstanceOf[(String, String)]
- execute(sql)
- (udr, viewName)
- }
- val warns: Seq[DataQualityCheckResult] = queryCheckResult(udrWarnSql(topN, udrWithViews.filter(_._1.errorType == "warn")))
- val errors: Seq[DataQualityCheckResult] = queryCheckResult(udrErrorSql(topN, udrWithViews.filter(_._1.errorType == "error")))
-
- val antiJoinSql = udrAntiJoinSql(idColumn, tempViewName, udrWithViews.filter(_._1.errorType == "error").map(_._2))
-
- (CheckStep(warns, errors, antiJoinSql), udrWithViews.map(_._2))
- }
-
- def queryCheckResult(sql: String): Seq[DataQualityCheckResult]
-
- def execute(sql: String): DataFrame
-
- def createView(df: DataFrame, tempViewName: String): Unit
-
- def dropView(tempViewName: String): Unit
-
- def dropUnusedCols(df: DataFrame, cols: String): DataFrame
-
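- // Deduplicate by primary key: keep only the first row per id, ordered by the sort columns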
- def windowByPkSql(tempViewName: String, idColumns: String, sortColumns: String = "", desc: Boolean = true): String = {
- s"""
- |SELECT *
- |FROM (SELECT *, ROW_NUMBER()
- | OVER (PARTITION BY $idColumns
- | ORDER BY ${if (isNullOrEmpty(sortColumns)) "1" else sortColumns} ${if (desc) "DESC" else "ASC"}) as __row_num
- | FROM $tempViewName
- |) WHERE __row_num = 1""".stripMargin
- }
-
-
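- // Select the duplicated rows (row number > 1) and tag them with a "Duplicated PK check" error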
- def windowByPkSqlErrors(tempViewName: String, idColumns: String, sortColumns: String = "", desc: Boolean = true): String = {
- s"""
- |SELECT ${joinIdColumns(idColumns)} as id,
- | ARRAY('Duplicated PK check$DELIMITER$idColumns') as error_result
- |FROM (SELECT *, ROW_NUMBER()
- | OVER (PARTITION BY $idColumns
- | ORDER BY ${if (isNullOrEmpty(sortColumns)) "1" else sortColumns} ${if (desc) "DESC" else "ASC"}) as __row_num
- | FROM $tempViewName
- |) WHERE __row_num > 1""".stripMargin
- }
-
- def generateErrorUnions(dataQualityCheckMapping: Seq[DataQualityConfig], topN: Int, view: String): String = {
- dataQualityCheckMapping
- .filter(_.errorType == ErrorType.error)
- .map(it =>
- s"""(SELECT
- | "${it.column}" as column,
- | "${it.dataCheckType}" as dataCheckType,
- | arrayJoin(top(collect_list(string(id)), $topN), ',') as ids,
- | "${it.errorType}" as errorType,
- | 0 as warnCount,
- | count(*) as errorCount
- |FROM `$view`
- |WHERE array_contains(error_result, "${it.dataCheckType}${DELIMITER}${it.column}")
- |)""".stripMargin
- )
- .mkString("\nUNION ALL\n")
- }
-
- def generateWarnUnions(dataQualityCheckMapping: Seq[DataQualityConfig], topN: Int, view: String): String = {
- dataQualityCheckMapping
- .filter(_.errorType == ErrorType.warn)
- .map(it =>
- s"""(SELECT
- | "${it.column}" as column,
- | "${it.dataCheckType}" as dataCheckType,
- | arrayJoin(top(collect_list(string(id)), $topN), ',') as ids,
- | "${it.errorType}" as errorType,
- | count(*) as warnCount,
- | 0 as errorCount
- |FROM `$view`
- |WHERE array_contains(warn_result, "${it.dataCheckType}${DELIMITER}${it.column}")
- |)""".stripMargin
- )
- .mkString("\nUNION ALL\n")
- }
-
- def checkSql(tempViewName: String, resultView: String, dataQualityCheckMapping: Seq[DataQualityConfig], idColumn: String): String = {
- s"""
- |CREATE TEMPORARY VIEW $resultView
- |AS SELECT ${joinIdColumns(idColumn)} as id,
- | flatten(ARRAY(${generateWarnCases(dataQualityCheckMapping)}
- | )) as warn_result,
- | flatten(ARRAY(${generateErrorCases(dataQualityCheckMapping)}
- | )) as error_result
- |FROM `$tempViewName`
- """.stripMargin
- }
-
- def udrWarnSql(topN: Int, udrWithViews: Seq[(DataQualityConfig, String)])
- : String = {
- if (udrWithViews.isEmpty) {
- StringUtil.EMPTY
- } else {
- udrWithViews.map { case (udr, viewName) =>
- s"""
- |(SELECT "${udr.column}" as column,
- | "${udr.dataCheckType}" as dataCheckType,
- | arrayJoin(top(collect_list(string(id)), $topN), ',') as ids,
- | "${udr.errorType}" as errorType,
- | count(*) as warnCount,
- | 0 as errorCount
- |FROM $viewName)
- |""".stripMargin
- }
- .mkString("\nUNION ALL\n")
- }
- }
-
- def udrErrorSql(topN: Int, udrWithViews: Seq[(DataQualityConfig, String)])
- : String = {
- if (udrWithViews.isEmpty) {
- StringUtil.EMPTY
- } else {
- udrWithViews.map { case (udr, viewName) =>
- s"""
- |(SELECT "${udr.column}" as column,
- | "${udr.dataCheckType}" as dataCheckType,
- | arrayJoin(top(collect_list(string(id)), $topN), ',') as ids,
- | "${udr.errorType}" as errorType,
- | 0 as warnCount,
- | count(*) as errorCount
- |FROM $viewName)
- |""".stripMargin
- }
- .mkString("\nUNION ALL\n")
- }
- }
-
-
- def antiJoinSql(idColumn: String, tempViewName: String, resultView: String): String = {
- s"""|LEFT ANTI JOIN (
- | SELECT id FROM `$resultView`
- | WHERE size(error_result) > 0
- |) bad_ids ON bad_ids.id = ${joinIdColumns(idColumn, tempViewName)}
- """.stripMargin
- }
-
- def udrAntiJoinSql(idColumn: String, tempViewName: String, viewNames: Seq[String]): String = {
- if (viewNames.isEmpty) {
- StringUtil.EMPTY
- } else {
- s"""|LEFT ANTI JOIN (
- |${viewNames.map(view => s"SELECT id FROM $view").mkString("\nUNION ALL\n")}
- |) udr_bad_ids ON udr_bad_ids.id = ${joinIdColumns(idColumn, tempViewName)}
- |""".stripMargin
- }
- }
-}
-// scalastyle:on
-
-object QualityCheck {
- val DELIMITER = "__"
- val DEFAULT_TOP_N = 1000
-
- def emptyArrayIfMissing(query: String): String = {
- if (query.trim == "") {
- "array()"
- } else {
- query
- }
- }
-
- def generateWarnCases(dataQualityCheckMapping: Seq[DataQualityConfig]): String = {
- emptyArrayIfMissing(dataQualityCheckMapping
- .filter(_.errorType == ErrorType.warn)
- .map(it => s"""CASE WHEN ${it.rule} THEN array("${it.dataCheckType}${DELIMITER}${it.column}") ELSE array() END""")
- .mkString(",\n\t\t\t\t")
- )
- }
-
- def generateErrorCases(dataQualityCheckMapping: Seq[DataQualityConfig]): String = {
- emptyArrayIfMissing(dataQualityCheckMapping
- .filter(_.errorType == ErrorType.error)
- .map(it => s"""CASE WHEN ${it.rule} THEN array("${it.dataCheckType}${DELIMITER}${it.column}") ELSE array() END""")
- .mkString(",\n\t\t\t\t")
- )
- }
-
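- // Build the id expression; a composite key like "a,b" becomes CONCAT(ifnull(`a`, 'NULL'), '__', ifnull(`b`, 'NULL'))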
- def joinIdColumns(idColumn: String, prefix: String = ""): String = {
- val realPrefix = if (isNullOrEmpty(prefix)) "" else s"`$prefix`."
- if (idColumn.contains(",")) {
- idColumn.split(",").map(it => s"ifnull($realPrefix`${it.trim}`, 'NULL')").mkString("CONCAT(", s", '$DELIMITER', ", ")")
- } else {
- s"$realPrefix`$idColumn`"
- }
- }
-
- def joinOnConditions(resultView: String, tempViewName: String, idColumn: String): String = {
- idColumn.split(",").map(_.trim).zipWithIndex.map { case (column: String, idx: Int) =>
- s"""`$tempViewName`.`$column` = split(`$resultView`.id, '$DELIMITER')[$idx]"""
- }.mkString(" AND \n\t")
- }
-
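- // With no error-level checks configured, pass the whole view through; otherwise anti-join away the failing ids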
- def generateAntiJoinSql(sql: String, udrSql: String, tempViewName: String): String = {
- if (isNullOrEmpty(sql) && isNullOrEmpty(udrSql)) {
- s"""SELECT * FROM `$tempViewName`"""
- } else {
- s"""|SELECT `$tempViewName`.* FROM `$tempViewName`
- |$sql
- |$udrSql
- """.stripMargin
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheckRule.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheckRule.scala
deleted file mode 100644
index f7492b5..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheckRule.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package com.github.sharpdata.sharpetl.core.quality
-
-final case class QualityCheckRule(dataCheckType: String, rule: String, errorType: String) {
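- // Illustrative example: a rule defined as "powerNullCheck($column)" applied to column "age" becomes "powerNullCheck(`age`)"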
- def withColumn(column: String): DataQualityConfig = {
- if (rule.contains("$")) {
- DataQualityConfig(column, dataCheckType, rule.replace("$column", s"`$column`").replaceAll("``", "`"), errorType)
- } else {
- DataQualityConfig(column, dataCheckType, rule, errorType)
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheckRuleConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheckRuleConfig.scala
deleted file mode 100644
index 6f8f614..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/QualityCheckRuleConfig.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package com.github.sharpdata.sharpetl.core.quality
-
-import com.github.sharpdata.sharpetl.core.exception.Exception.BadDataQualityCheckRuleException
-
-import scala.io.Source
-
-object QualityCheckRuleConfig extends Serializable {
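- // Load /quality-check.yaml from the classpath and index the rules by dataCheckType (first definition wins on duplicates)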
- def readQualityCheckRules(): Map[String, QualityCheckRule] = {
- val yamlString = Source.fromInputStream(getClass.getResourceAsStream("/quality-check.yaml")).getLines.mkString("\n")
-
- import cats.syntax.either._
- import io.circe._
- import io.circe.generic.auto._
- import io.circe.yaml.parser
-
- parser.parse(yamlString)
- .leftMap(err => err: Error)
- .flatMap(_.as[List[QualityCheckRule]])
- .valueOr(it => throw BadDataQualityCheckRuleException(
- s"Data quality check rules not valid, please check config file: quality-check.yaml, error: ${it.getMessage}")
- )
- .groupBy(_.dataCheckType)
- .mapValues(_.head)
- .toMap
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/UserDefinedRule.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/UserDefinedRule.scala
deleted file mode 100644
index 2093692..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/UserDefinedRule.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.github.sharpdata.sharpetl.core.quality
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-
-@Stable(since = "1.0.0")
-trait UserDefinedRule extends Serializable {
- def check(tempViewName: String, idColumn: String, udr: DataQualityConfig): (String, String)
-}
-
-object UserDefinedRule extends Serializable {
- val PREFIX = "UDR"
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/udr/DuplicatedCheck.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/udr/DuplicatedCheck.scala
deleted file mode 100644
index e135ed1..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/quality/udr/DuplicatedCheck.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.github.sharpdata.sharpetl.core.quality.udr
-
-import com.github.sharpdata.sharpetl.core.quality.QualityCheck.joinIdColumns
-import com.github.sharpdata.sharpetl.core.quality.{DataQualityConfig, UserDefinedRule}
-
-object DuplicatedCheck extends UserDefinedRule {
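- // Build a temp view holding the ids of all rows whose checked column value occurs more than once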
- override def check(tempViewName: String, idColumn: String, udr: DataQualityConfig): (String, String) = {
- val resultViewName = s"${tempViewName}__${udr.dataCheckType.replace(' ', '_')}__${udr.column}"
- val sql =
- s"""|CREATE TEMPORARY VIEW $resultViewName
- | (ID COMMENT 'duplicated id')
- | AS SELECT ${joinIdColumns(idColumn, prefix = "a")} AS id
- | FROM `$tempViewName` a
- | INNER JOIN (SELECT `$tempViewName`.`${udr.column}`
- | FROM `$tempViewName`
- | GROUP BY `$tempViewName`.`${udr.column}`
- | HAVING count(*) > 1) b
- | ON a.`${udr.column}` = b.`${udr.column}`
- |""".stripMargin
- (sql, resultViewName)
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/EncryptedDataSourceFactory.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/EncryptedDataSourceFactory.scala
deleted file mode 100644
index f7b62ac..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/EncryptedDataSourceFactory.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository
-
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
-import org.apache.ibatis.datasource.DataSourceFactory
-
-import java.util.Properties
-import javax.sql.DataSource
-
-case object HikariDataSource {
- private val hikariConfig = new HikariConfig()
-
- lazy val hikariDataSource = new HikariDataSource(hikariConfig)
-
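- // Reads flyway.* connection settings; maxLifetime defaults to 0 (infinite) and the pool size to 1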
- def setProperties(properties: Properties): Unit = {
- hikariConfig.setDriverClassName(ETLConfig.getProperty("flyway.driver"))
- hikariConfig.setJdbcUrl(ETLConfig.getProperty("flyway.url"))
- hikariConfig.setUsername(ETLConfig.getProperty("flyway.username"))
- hikariConfig.setPassword(ETLConfig.getProperty("flyway.password"))
- // 1800000 (30 minutes)
- // hikariConfig.setKeepaliveTime(ETLConfig.getProperty("flyway.keepalivetime", "1800000").toLong)
- // A value of 0 indicates no maximum lifetime (infinite lifetime)
- hikariConfig.setMaxLifetime(ETLConfig.getProperty("flyway.maxlifetime", "0").toLong)
- hikariConfig.setMaximumPoolSize(ETLConfig.getProperty("flyway.maxpoolsize", "1").toInt)
- }
-}
-
-case class EncryptedDataSourceFactory() extends DataSourceFactory {
- override def setProperties(properties: Properties): Unit = {
- HikariDataSource.setProperties(properties)
- }
-
- override def getDataSource: DataSource = {
- HikariDataSource.hikariDataSource
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/JobLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/JobLogAccessor.scala
deleted file mode 100644
index 2659466..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/JobLogAccessor.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository
-
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.util.{Constants, JDBCUtil}
-
-import java.time.LocalDateTime
-
-abstract class JobLogAccessor() {
- def lastSuccessExecuted(workflowName: String): JobLog
-
- def lastExecuted(workflowName: String): JobLog
-
- def executionsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[JobLog]
-
- def executionsLastYear(workflowName: String): Array[JobLog]
-
- def isAnotherJobRunning(jobName: String): JobLog
-
- def create(jobLog: JobLog): Unit = {
- jobLog.jobStartTime = LocalDateTime.now()
- jobLog.createTime = LocalDateTime.now()
- jobLog.lastUpdateTime = LocalDateTime.now()
- }
-
- def update(jobLog: JobLog): Unit = {
- jobLog.lastUpdateTime = LocalDateTime.now()
- }
-
- def updateStatus(jobLog: JobLog): Unit = {
- jobLog.lastUpdateTime = LocalDateTime.now()
- }
-
- def getLatestSuccessJobLogByNames(wfNames: Array[String]): Array[JobLog]
-
- def getPreviousJobLog(jobLog: JobLog): JobLog
-
- def getUnprocessedUpstreamJobLog(upstreamWFName: String, upstreamLogId: BigInt): Array[JobLog]
-}
-
-object JobLogAccessor {
- lazy val jobLogAccessor: JobLogAccessor = JobLogAccessor.getInstance(JDBCUtil.dbType)
-
- private def getInstance(databaseType: String): JobLogAccessor = {
- databaseType match {
- case Constants.ETLDatabaseType.MSSQL => new com.github.sharpdata.sharpetl.core.repository.mssql.JobLogAccessor()
- case Constants.ETLDatabaseType.H2 => new com.github.sharpdata.sharpetl.core.repository.mysql.JobLogAccessor()
- case Constants.ETLDatabaseType.MYSQL => new com.github.sharpdata.sharpetl.core.repository.mysql.JobLogAccessor()
- case Constants.ETLDatabaseType.SPARK_SHARP_ETL => new com.github.sharpdata.sharpetl.core.repository.spark.JobLogAccessor()
- case Constants.ETLDatabaseType.FLINK_SHARP_ETL => new com.github.sharpdata.sharpetl.core.repository.flink.JobLogAccessor()
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/MyBatisSession.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/MyBatisSession.scala
deleted file mode 100644
index 2c8c7f3..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/MyBatisSession.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository
-
-import org.apache.ibatis.io.Resources
-import org.apache.ibatis.session.{SqlSession, SqlSessionFactory, SqlSessionFactoryBuilder}
-
-object MyBatisSession {
- private var sqlSessionFactory: SqlSessionFactory = getFactory()
-
- private def getFactory() = {
- val resource = s"mybatis-config.xml"
- val inputStream = Resources.getResourceAsStream(resource)
-
- new SqlSessionFactoryBuilder().build(inputStream)
- }
-
- // for test
- def reloadFactory(): Unit = {
- sqlSessionFactory = getFactory()
- }
-
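- // Open an auto-commit session, run the query, and always close the session afterwards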
- def execute[T](query: SqlSession => T): T = {
- var session: Option[SqlSession] = None
- try {
- session = Some(sqlSessionFactory.openSession(true))
- val sessionValue = session.get
- query(sessionValue)
- }
- finally {
- if (session.nonEmpty) {
- session.get.close()
- }
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/QualityCheckAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/QualityCheckAccessor.scala
deleted file mode 100644
index 8bd7cbd..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/QualityCheckAccessor.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository
-
-import com.github.sharpdata.sharpetl.core.util.Constants
-import com.github.sharpdata.sharpetl.core.util.Constants.ETLDatabaseType
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-
-abstract class QualityCheckAccessor() {
- def create(log: QualityCheckLog): Unit
-}
-
-
-object QualityCheckAccessor {
- def getInstance(databaseType: String): QualityCheckAccessor = {
- databaseType match {
- case ETLDatabaseType.MSSQL => new com.github.sharpdata.sharpetl.core.repository.mssql.QualityCheckAccessor()
- case Constants.ETLDatabaseType.H2 => new com.github.sharpdata.sharpetl.core.repository.mysql.QualityCheckAccessor()
- case Constants.ETLDatabaseType.MYSQL => new com.github.sharpdata.sharpetl.core.repository.mysql.QualityCheckAccessor()
- case Constants.ETLDatabaseType.SPARK_SHARP_ETL => new com.github.sharpdata.sharpetl.core.repository.spark.QualityCheckAccessor()
- case Constants.ETLDatabaseType.FLINK_SHARP_ETL => new com.github.sharpdata.sharpetl.core.repository.flink.QualityCheckAccessor()
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/StepLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/StepLogAccessor.scala
deleted file mode 100644
index bdb0fda..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/StepLogAccessor.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository
-
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import com.github.sharpdata.sharpetl.core.util.Constants.ETLDatabaseType
-import com.github.sharpdata.sharpetl.core.util.{Constants, JDBCUtil}
-
-import java.time.LocalDateTime
-
-abstract class StepLogAccessor() {
-
- def create(stepLog: StepLog): Unit
-
- def update(stepLog: StepLog): Unit
-
- def stepLogs(jobId: Long): Array[StepLog]
-
- def stepLogsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[StepLog]
-
-}
-
-object StepLogAccessor {
- lazy val stepLogAccessor: StepLogAccessor = StepLogAccessor.getInstance(JDBCUtil.dbType)
-
- private def getInstance(databaseType: String): StepLogAccessor = {
- databaseType match {
- case ETLDatabaseType.MSSQL => new com.github.sharpdata.sharpetl.core.repository.mssql.StepLogAccessor()
- case Constants.ETLDatabaseType.H2 => new com.github.sharpdata.sharpetl.core.repository.mysql.StepLogAccessor()
- case Constants.ETLDatabaseType.MYSQL => new com.github.sharpdata.sharpetl.core.repository.mysql.StepLogAccessor()
- case Constants.ETLDatabaseType.SPARK_SHARP_ETL => new com.github.sharpdata.sharpetl.core.repository.spark.StepLogAccessor()
- case Constants.ETLDatabaseType.FLINK_SHARP_ETL => new com.github.sharpdata.sharpetl.core.repository.flink.StepLogAccessor()
- }
- }
-}
-
-
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/JobLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/JobLogAccessor.scala
deleted file mode 100644
index 1b54e38..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/JobLogAccessor.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.flink
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.flink.JobLogMapper
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-
-import java.time.LocalDateTime
-import java.time.LocalDateTime.now
-
-class JobLogAccessor() extends repository.JobLogAccessor() {
-
- def lastSuccessExecuted(workflowName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastSuccessExecuted(workflowName)
- })
- }
-
- override def lastExecuted(workflowName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastExecuted(workflowName)
- })
- }
-
- def isAnotherJobRunning(jobName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.isAnotherJobRunning(jobName)
- })
- }
-
- override def create(jobLog: JobLog): Unit = {
- super.create(jobLog)
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.createJobLog(jobLog)
- jobLog
- })
- }
-
- override def update(jobLog: JobLog): Unit = {
- super.update(jobLog)
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.updateJobLog(jobLog)
- jobLog
- })
- }
-
- override def updateStatus(jobLog: JobLog): Unit = {
- super.updateStatus(jobLog)
- update(jobLog)
- }
-
- override def getLatestSuccessJobLogByNames(wfNames: Array[String]): Array[JobLog] = {
- wfNames.map(name => {
- this.lastSuccessExecuted(name)
- }).filterNot(_ == null)
- }
-
- override def executionsLastYear(workflowName: String): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.executionsLastYear(workflowName, now().minusYears(1L).format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def executionsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.executionsBetween(startTime.format(L_YYYY_MM_DD_HH_MM_SS), endTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def getPreviousJobLog(jobLog: JobLog): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastJobLog(jobLog.workflowName, jobLog.jobStartTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def getUnprocessedUpstreamJobLog(upstreamWFName: String, upstreamLogId: BigInt): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.unprocessedUpstreamJobLog(upstreamWFName, upstreamLogId.toString())
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/QualityCheckAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/QualityCheckAccessor.scala
deleted file mode 100644
index ba8ca4d..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/QualityCheckAccessor.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.flink
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.flink
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-
-class QualityCheckAccessor() extends repository.QualityCheckAccessor() {
- def create(log: QualityCheckLog): Unit = {
- execute[QualityCheckLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[flink.QualityCheckLogMapper])
- mapper.create(log)
- log
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/StepLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/StepLogAccessor.scala
deleted file mode 100644
index 39b74a8..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/flink/StepLogAccessor.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.flink
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.flink.StepLogMapper
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-
-import java.time.LocalDateTime
-
-class StepLogAccessor() extends repository.StepLogAccessor() {
-
- def create(stepLog: StepLog): Unit = {
- execute[StepLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.createStepLog(stepLog)
- stepLog
- })
- }
-
- def update(stepLog: StepLog): Unit = {
- execute[StepLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.updateStepLog(stepLog)
- stepLog
- })
- }
-
- def stepLogs(jobId: Long): Array[StepLog] = {
- execute[Array[StepLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.stepLogs(jobId)
- })
- }
-
- def stepLogsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[StepLog] = {
- execute[Array[StepLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.stepLogsBetween(startTime.format(L_YYYY_MM_DD_HH_MM_SS), endTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/JobLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/JobLogMapper.scala
deleted file mode 100644
index 3c48c0c..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/JobLogMapper.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.flink
-
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import org.apache.ibatis.annotations.{Insert, Param, Select, Update}
-
-/**
- * Logs access for [[LogDrivenJob]]
- */
-trait JobLogMapper extends Serializable {
-
- /**
- * Successful job executions for the given workflow within the last year
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status = 'SUCCESS' and job_start_time > #{lastYear}"
- ))
- def executionsLastYear(@Param("workflowName") workflowName: String, @Param("lastYear") lastYear: String): Array[JobLog]
-
- /**
- * Job executions whose start time falls between startTime (inclusive) and endTime (exclusive)
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where job_start_time >= #{startTime} and job_start_time < #{endTime}"
- ))
- def executionsBetween(@Param("startTime") startTime: String, @Param("endTime") endTime: String): Array[JobLog]
-
- /**
- * The most recently executed job (excluding currently running ones)
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status != 'RUNNING' order by data_range_start desc, job_id desc limit 1"
- ))
- def lastExecuted(workflowName: String): JobLog
-
-
- /**
- * The most recent successfully executed job
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status = 'SUCCESS' order by data_range_start desc limit 1"
- ))
- def lastSuccessExecuted(workflowName: String): JobLog
-
- /**
- * Check whether another job with the same [[ExecPeriod]] is still running
- *
- * @param jobName distinguishes jobs by [[ExecPeriod]]
- * @return the running job log, or null when there is none
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where job_name = #{workflowName} and status = 'RUNNING' limit 1"
- ))
- def isAnotherJobRunning(jobName: String): JobLog
-
- @Insert(Array("insert into sharp_etl.job_log(job_id, job_name, `period`, workflow_name," +
- "data_range_start, data_range_end," +
- "job_start_time, job_end_time, " +
- "status, create_time," +
- "last_update_time, file, application_id, project_name, load_type, log_driven_type, runtime_args) values " +
- "(#{jobId}, #{jobName}, #{period}, #{workflowName}, " +
- "#{dataRangeStart}, #{dataRangeEnd}, NOW(), NOW(), " +
- "#{status}, NOW(), NOW(), #{file}, #{applicationId}, #{projectName}, #{loadType}, #{logDrivenType}, #{runtimeArgs})"
- ))
- def createJobLog(jobLog: JobLog): Unit
-
- @Update(Array(
- "update sharp_etl.job_log set " +
- "workflow_name = #{workflowName}, " +
- "`period` = #{period}, " +
- "job_name = #{jobName}, " +
- "data_range_start = #{dataRangeStart}, " +
- "data_range_end = #{dataRangeEnd}, " +
- "job_start_time = TO_TIMESTAMP(#{jobStartTime}, 'yyyy-MMM-dd HH:mm:ss'), " +
- "job_end_time = TO_TIMESTAMP(#{jobEndTime}, 'yyyy-MMM-dd HH:mm:ss'), " +
- "status = #{status}, " +
- "create_time = TO_TIMESTAMP(#{createTime}, 'yyyy-MMM-dd HH:mm:ss'), " +
- "last_update_time = TO_TIMESTAMP(#{lastUpdateTime}, 'yyyy-MMM-dd HH:mm:ss'), " +
- "file = #{file}, " +
- "application_id = #{applicationId}, " +
- "load_type = #{loadType}, " +
- "log_driven_type = #{logDrivenType}, " +
- "project_name = #{projectName}, " +
- "runtime_args = #{runtimeArgs} " +
- "where job_id = #{jobId}"
- ))
- def updateJobLog(jobLog: JobLog): Unit
-
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and job_start_time < #{jobStartTime} order by job_start_time desc limit 1"
- ))
- def lastJobLog(@Param("workflowName") workflowName: String, @Param("jobStartTime") jobStartTime: String): JobLog
-
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where status='SUCCESS' and workflow_name = #{upstreamWFName} and job_id > #{upstreamLogId} order by job_id"
- ))
- def unprocessedUpstreamJobLog(@Param("upstreamWFName") upstreamWFName: String, @Param("upstreamLogId") upstreamLogId: String): Array[JobLog]
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/QualityCheckLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/QualityCheckLogMapper.scala
deleted file mode 100644
index 7b7acaa..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/QualityCheckLogMapper.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.flink
-
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-import org.apache.ibatis.annotations.Insert
-
-trait QualityCheckLogMapper {
- @Insert(Array(
- "insert into sharp_etl.quality_check_log(id, job_id, job_name, `column`, data_check_type, ids, error_type, warn_count, " +
- "error_count, create_time, last_update_time)",
- "values ",
- "(#{id}, #{jobId}, #{jobName}, #{column}, #{dataCheckType}, #{ids}, #{errorType}, CAST(#{warnCount} as INT), CAST(#{errorCount} as INT), NOW(), NOW())"
- ))
- def create(jobError: QualityCheckLog): Unit
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/StepLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/StepLogMapper.scala
deleted file mode 100644
index 2ffa28f..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/flink/StepLogMapper.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.flink
-
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import org.apache.ibatis.annotations._
-
-trait StepLogMapper extends Serializable {
-
- @Insert(Array("insert into sharp_etl.step_log(" +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- ") values (" +
- "#{jobId}, #{stepId}, #{status}, NOW(), NOW(), #{duration}, #{output}, #{error}, " +
- "#{sourceCount}, #{targetCount}, #{successCount}, #{failureCount}, #{sourceType}, #{targetType}" +
- ")"
- ))
- def createStepLog(stepLog: StepLog): Unit
-
- @Update(Array(
- "update sharp_etl.step_log set " +
- "status = #{status}, " +
- "start_time = TO_TIMESTAMP(#{startTime}, 'yyyy-MMM-dd HH:mm:ss'), " +
- "end_time = NOW(), " +
- "duration = #{duration}, " +
- "output = #{output}, " +
- "error = #{error}, " +
- "source_count = #{sourceCount}, " +
- "target_count = #{targetCount}, " +
- "success_count = #{successCount}, " +
- "failure_count = #{failureCount}, " +
- "source_type = #{sourceType}, " +
- "target_type = #{targetType} " +
- "where job_id = #{jobId} and step_id = #{stepId}"
- ))
- def updateStepLog(stepLog: StepLog): Unit
-
- @Select(Array(
- "select " +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- " from sharp_etl.step_log where job_id = #{jobId}"
- ))
- def stepLogs(jobId: Long): Array[StepLog]
-
- @Select(Array(
- "select " +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- " from sharp_etl.step_log where job_id in" +
- " (select job_id from sharp_etl.job_log " +
- " where job_start_time >= #{startTime}" +
- " and job_start_time < #{endTime})"
- ))
- def stepLogsBetween(@Param("startTime") startTime: String, @Param("endTime") endTime: String): Array[StepLog]
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/JobLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/JobLogMapper.scala
deleted file mode 100644
index f3b5d6f..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/JobLogMapper.scala
+++ /dev/null
@@ -1,119 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.mssql
-
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import org.apache.ibatis.annotations.{Insert, Options, Param, Select, Update}
-
-/**
- * Logs access for [[LogDrivenJob]]
- */
-trait JobLogMapper extends Serializable {
- /**
- * The most recently executed job for the given workflow (any non-RUNNING status)
- *
- * @return
- */
- @Select(Array(
- "select top 1 " +
- "*" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status != 'RUNNING' order by data_range_start desc, job_id desc"
- ))
- def lastExecuted(workflowName: String): JobLog
-
- /**
- * The most recent successfully executed job for the given workflow
- *
- * @return
- */
- @Select(Array(
- "select top 1 " +
- "*" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status = 'SUCCESS' order by data_range_start desc"
- ))
- def lastSuccessExecuted(workflowName: String): JobLog
-
- /**
- * job executions in last year
- *
- * @param workflowName
- * @return
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status = 'SUCCESS' and job_start_time > #{lastYear}"
- ))
- def executionsLastYear(@Param("workflowName") workflowName: String, @Param("lastYear") lastYear: String): Array[JobLog]
-
- /**
- * job executions between
- *
- * @param startTime
- * @param endTime
- * @return
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where job_start_time >= #{startTime} and job_start_time < #{endTime}"
- ))
- def executionsBetween(@Param("startTime") startTime: String, @Param("endTime") endTime: String): Array[JobLog]
-
- /**
- * Checks whether another job with the same [[ExecPeriod]] is already running
- *
- * @param jobName distinguishes the [[ExecPeriod]]
- * @return
- */
- @Select(Array(
- "select top 1 " +
- "*" +
- " from sharp_etl.job_log where job_name = #{jobName} and status = 'RUNNING'"
- ))
- def isAnotherJobRunning(jobName: String): JobLog
-
- @Insert(Array("insert into sharp_etl.job_log(job_id, job_name, [period], workflow_name," +
- "data_range_start, data_range_end," +
- "job_start_time, job_end_time, " +
- "status, create_time," +
- "last_update_time, file, application_id, project_name, load_type, log_driven_type, runtime_args) values " +
- "(#{jobId}, #{jobName}, #{period}, #{workflowName}, " +
- "#{dataRangeStart}, #{dataRangeEnd}, #{jobStartTime}, #{jobEndTime}, " +
- "#{status}, #{createTime}, #{lastUpdateTime}, #{file}, #{applicationId}, #{projectName}, #{loadType}, #{logDrivenType}, #{runtimeArgs})"
- ))
- //@Options(useGeneratedKeys = true, keyProperty = "jobId")
- def createJobLog(jobLog: JobLog): Unit
-
-
- @Update(Array(
- "update sharp_etl.job_log set " +
- "job_name = #{jobName}, " +
- "[period] = #{period}, " +
- "workflow_name = #{workflowName}, " +
- "data_range_start = #{dataRangeStart}, " +
- "data_range_end = #{dataRangeEnd}, " +
- "job_start_time = #{jobStartTime}, " +
- "job_end_time = #{jobEndTime}, " +
- "status = #{status}, " +
- "create_time = #{createTime}, " +
- "last_update_time = #{lastUpdateTime}, " +
- "file = #{file}, " +
- "application_id = #{applicationId}, " +
- "load_type = #{loadType}, " +
- "log_driven_type = #{logDrivenType}, " +
- "project_name = #{projectName}, " +
- "runtime_args = #{runtimeArgs} " +
- "where job_id = #{jobId}"
- ))
- def updateJobLog(jobLog: JobLog): Unit
-
- @Select(Array(
- "select top 1 " +
- "*" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and job_start_time < #{jobStartTime} order by job_start_time desc"
- ))
- def lastJobLog(@Param("workflowName") workflowName: String, @Param("jobStartTime") jobStartTime: String): JobLog
-
- @Select(Array(
- "select *" +
- " from job_log where status='SUCCESS' and workflow_name = #{upstreamWFName} and job_id > #{upstreamLogId} order by job_id"
- ))
- def unprocessedUpstreamJobLog(@Param("upstreamWFName") upstreamWFName: String, @Param("upstreamLogId") upstreamLogId: String): Array[JobLog]
-}
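
The per-database packages differ only in SQL dialect: MySQL quotes `period` with backticks and paginates with `limit 1`, SQL Server uses `[period]` and `select top 1`, Spark SQL substitutes `current_timestamp()`, and Flink uses `NOW()` with `TO_TIMESTAMP`. A hypothetical helper (not part of the project) that makes the dialect-to-mapper mapping explicit:

```scala
import com.github.sharpdata.sharpetl.core.repository.mapper.{flink, mssql, mysql, spark}

object MapperDialects {
  // Illustrative only: resolve the dialect-specific JobLogMapper trait.
  def jobLogMapperFor(dialect: String): Class[_] = dialect.toLowerCase match {
    case "mysql" => classOf[mysql.JobLogMapper] // backticks, `limit 1`
    case "mssql" => classOf[mssql.JobLogMapper] // [brackets], `select top 1`
    case "spark" => classOf[spark.JobLogMapper] // current_timestamp()
    case "flink" => classOf[flink.JobLogMapper] // NOW(), TO_TIMESTAMP(...)
    case other   => throw new IllegalArgumentException(s"unsupported dialect: $other")
  }
}
```
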
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/QualityCheckLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/QualityCheckLogMapper.scala
deleted file mode 100644
index 22fedeb..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/QualityCheckLogMapper.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.mssql
-
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-import org.apache.ibatis.annotations.Insert
-
-trait QualityCheckLogMapper {
- @Insert(Array(
- "insert into sharp_etl.quality_check_log(job_id, job_name, [column], data_check_type, ids, error_type, warn_count, " +
- "error_count, create_time, last_update_time)",
- "values ",
- "(#{jobId}, #{job_name}, #{column}, #{dataCheckType}, #{ids}, #{errorType}, #{warnCount}, #{errorCount}, #{createTime}, #{lastUpdateTime})"
- ))
- def create(jobError: QualityCheckLog): Unit
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/StepLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/StepLogMapper.scala
deleted file mode 100644
index 521576d..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mssql/StepLogMapper.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.mssql
-
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import org.apache.ibatis.annotations.{Insert, Param, Select, Update}
-
-trait StepLogMapper extends Serializable {
-
- @Insert(Array("insert into sharp_etl.step_log(" +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- ") values (" +
- "#{jobId}, #{stepId}, #{status}, #{startTime}, #{endTime}, #{duration}, #{output}, #{error}, " +
- "#{sourceCount}, #{targetCount}, #{successCount}, #{failureCount}, #{sourceType}, #{targetType}" +
- ")"
- ))
- def createStepLog(stepLog: StepLog): Unit
-
- @Update(Array(
- "update sharp_etl.step_log set " +
- "job_id = #{jobId}, " +
- "step_id = #{stepId}, " +
- "status = #{status}, " +
- "start_time = #{startTime}, " +
- "end_time = #{endTime}, " +
- "duration = #{duration}, " +
- "output = #{output}, " +
- "error = #{error}, " +
- "source_count = #{sourceCount}, " +
- "target_count = #{targetCount}, " +
- "success_count = #{successCount}, " +
- "failure_count = #{failureCount}, " +
- "source_type = #{sourceType}, " +
- "target_type = #{targetType} " +
- "where job_id = #{jobId} and step_id = #{stepId}"
- ))
- def updateStepLog(stepLog: StepLog): Unit
-
- @Select(Array(
- "select " +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- " from sharp_etl.step_log where job_id = #{jobId}"
- ))
- def stepLogs(jobId: Long): Array[StepLog]
-
- @Select(Array(
- "select " +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- " from sharp_etl.step_log where job_id in" +
- " (select job_id from sharp_etl.job_log " +
- " where job_start_time >= #{startTime}" +
- " and job_start_time < #{endTime})"
- ))
- def stepLogsBetween(@Param("startTime") startTime: String, @Param("endTime") endTime: String): Array[StepLog]
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/JobLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/JobLogMapper.scala
deleted file mode 100644
index 93d0867..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/JobLogMapper.scala
+++ /dev/null
@@ -1,115 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.mysql
-
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import org.apache.ibatis.annotations.{Insert, Options, Param, Select, Update}
-
-/**
- * Logs access for [[LogDrivenJob]]
- */
-trait JobLogMapper extends Serializable {
-
- /**
- * job executions in last year
- *
- * @param workflowName
- * @return
- */
- @Select(Array(
- "select *" +
- " from job_log where workflow_name = #{workflowName} and status = 'SUCCESS' and job_start_time > #{lastYear}"
- ))
- def executionsLastYear(@Param("workflowName") workflowName: String, @Param("lastYear") lastYear: String): Array[JobLog]
-
- /**
- * job executions between
- *
- * @param startTime
- * @param endTime
- * @return
- */
- @Select(Array(
- "select *" +
- " from job_log where job_start_time >= #{startTime} and job_start_time < #{endTime}"
- ))
- def executionsBetween(@Param("startTime") startTime: String, @Param("endTime") endTime: String): Array[JobLog]
-
- /**
- * The most recently executed job for the given workflow (any non-RUNNING status)
- *
- * @return
- */
- @Select(Array(
- "select *" +
- " from job_log where workflow_name = #{workflowName} and status != 'RUNNING' order by data_range_start desc, job_id desc limit 1"
- ))
- def lastExecuted(workflowName: String): JobLog
-
-
- /**
- * The most recent successfully executed job for the given workflow
- *
- * @return
- */
- @Select(Array(
- "select *" +
- " from job_log where workflow_name = #{workflowName} and status = 'SUCCESS' order by data_range_start desc limit 1"
- ))
- def lastSuccessExecuted(workflowName: String): JobLog
-
- /**
- * Checks whether another job with the same [[ExecPeriod]] is already running
- *
- * @param jobName distinguishes the [[ExecPeriod]]
- * @return
- */
- @Select(Array(
- "select *" +
- " from job_log where job_name = #{workflowName} and status = 'RUNNING' limit 1"
- ))
- def isAnotherJobRunning(jobName: String): JobLog
-
- @Insert(Array("insert into job_log(job_id, job_name, `period`, workflow_name," +
- "data_range_start, data_range_end," +
- "job_start_time, job_end_time, " +
- "status, create_time," +
- "last_update_time, file, application_id, project_name, load_type, log_driven_type, runtime_args) values " +
- "(#{jobId}, #{jobName}, #{period}, #{workflowName}, " +
- "#{dataRangeStart}, #{dataRangeEnd}, #{jobStartTime}, #{jobEndTime}, " +
- "#{status}, #{createTime}, #{lastUpdateTime}, #{file}, #{applicationId}, #{projectName}, #{loadType}, #{logDrivenType}, #{runtimeArgs})"
- ))
- //@Options(useGeneratedKeys = true, keyProperty = "jobId")
- def createJobLog(jobLog: JobLog): Unit
-
- @Update(Array(
- "update job_log set " +
- "workflow_name = #{workflowName}, " +
- "`period` = #{period}, " +
- "job_name = #{jobName}, " +
- "data_range_start = #{dataRangeStart}, " +
- "data_range_end = #{dataRangeEnd}, " +
- "job_start_time = #{jobStartTime}, " +
- "job_end_time = #{jobEndTime}, " +
- "status = #{status}, " +
- "create_time = #{createTime}, " +
- "last_update_time = #{lastUpdateTime}, " +
- "file = #{file}, " +
- "application_id = #{applicationId}, " +
- "load_type = #{loadType}, " +
- "log_driven_type = #{logDrivenType}, " +
- "project_name = #{projectName}, " +
- "runtime_args = #{runtimeArgs} " +
- "where job_id = #{jobId}"
- ))
- def updateJobLog(jobLog: JobLog): Unit
-
- @Select(Array(
- "select *" +
- " from job_log where workflow_name = #{workflowName} and job_start_time < #{jobStartTime} order by job_start_time desc limit 1"
- ))
- def lastJobLog(@Param("workflowName") workflowName: String, @Param("jobStartTime") jobStartTime: String): JobLog
-
- @Select(Array(
- "select *" +
- " from job_log where status='SUCCESS' and workflow_name = #{upstreamWFName} and job_id > #{upstreamLogId} order by job_id"
- ))
- def unprocessedUpstreamJobLog(@Param("upstreamWFName") upstreamWFName: String, @Param("upstreamLogId") upstreamLogId: String): Array[JobLog]
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/QualityCheckLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/QualityCheckLogMapper.scala
deleted file mode 100644
index 56ab27a..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/QualityCheckLogMapper.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.mysql
-
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-import org.apache.ibatis.annotations.Insert
-
-trait QualityCheckLogMapper {
- @Insert(Array(
- "insert into quality_check_log(job_id, job_name, `column`, data_check_type, ids, error_type, warn_count, " +
- "error_count, create_time, last_update_time)",
- "values ",
- "(#{jobId}, #{jobName}, #{column}, #{dataCheckType}, #{ids}, #{errorType}, #{warnCount}, #{errorCount}, #{createTime}, #{lastUpdateTime})"
- ))
- def create(jobError: QualityCheckLog): Unit
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/StepLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/StepLogMapper.scala
deleted file mode 100644
index 0440785..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/mysql/StepLogMapper.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.mysql
-
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import org.apache.ibatis.annotations._
-
-trait StepLogMapper extends Serializable {
-
- @Insert(Array("insert into step_log(" +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- ") values (" +
- "#{jobId}, #{stepId}, #{status}, #{startTime}, #{endTime}, #{duration}, #{output}, #{error}, " +
- "#{sourceCount}, #{targetCount}, #{successCount}, #{failureCount}, #{sourceType}, #{targetType}" +
- ")"
- ))
- def createStepLog(stepLog: StepLog): Unit
-
- @Update(Array(
- "update step_log set " +
- "job_id = #{jobId}, " +
- "step_id = #{stepId}, " +
- "status = #{status}, " +
- "start_time = #{startTime}, " +
- "end_time = #{endTime}, " +
- "duration = #{duration}, " +
- "output = #{output}, " +
- "error = #{error}, " +
- "source_count = #{sourceCount}, " +
- "target_count = #{targetCount}, " +
- "success_count = #{successCount}, " +
- "failure_count = #{failureCount}, " +
- "source_type = #{sourceType}, " +
- "target_type = #{targetType} " +
- "where job_id = #{jobId} and step_id = #{stepId}"
- ))
- def updateStepLog(stepLog: StepLog): Unit
-
- @Select(Array(
- "select " +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- " from sharp_etl.step_log where job_id = #{jobId}"
- ))
- def stepLogs(jobId: Long): Array[StepLog]
-
- @Select(Array(
- "select " +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- " from sharp_etl.step_log where job_id in" +
- " (select job_id from sharp_etl.job_log " +
- " where job_start_time >= #{startTime}" +
- " and job_start_time < #{endTime})"
- ))
- def stepLogsBetween(@Param("startTime") startTime: String, @Param("endTime") endTime: String): Array[StepLog]
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/JobLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/JobLogMapper.scala
deleted file mode 100644
index 095335b..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/JobLogMapper.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.spark
-
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import org.apache.ibatis.annotations.{Insert, Options, Param, Select, Update}
-
-/**
- * Logs access for [[LogDrivenJob]]
- */
-trait JobLogMapper extends Serializable {
-
- /**
- * job executions in last year
- *
- * @param workflowName
- * @return
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status = 'SUCCESS' and job_start_time > #{lastYear}"
- ))
- def executionsLastYear(@Param("workflowName") workflowName: String, @Param("lastYear") lastYear: String): Array[JobLog]
-
- /**
- * job executions between
- *
- * @param startTime
- * @param endTime
- * @return
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where job_start_time >= #{startTime} and job_start_time < #{endTime}"
- ))
- def executionsBetween(@Param("startTime") startTime: String, @Param("endTime") endTime: String): Array[JobLog]
-
- /**
- * The most recently executed job for the given workflow (any non-RUNNING status)
- *
- * @return
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status != 'RUNNING' order by data_range_start desc, job_id desc limit 1"
- ))
- def lastExecuted(workflowName: String): JobLog
-
-
- /**
- * The most recent successfully executed job for the given workflow
- *
- * @return
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and status = 'SUCCESS' order by data_range_start desc limit 1"
- ))
- def lastSuccessExecuted(workflowName: String): JobLog
-
- /**
- * Checks whether another job with the same [[ExecPeriod]] is already running
- *
- * @param jobName distinguishes the [[ExecPeriod]]
- * @return
- */
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where job_name = #{workflowName} and status = 'RUNNING' limit 1"
- ))
- def isAnotherJobRunning(jobName: String): JobLog
-
- @Insert(Array("insert into sharp_etl.job_log(job_id, job_name, `period`, workflow_name," +
- "data_range_start, data_range_end," +
- "job_start_time, job_end_time, " +
- "status, create_time," +
- "last_update_time, file, application_id, project_name, load_type, log_driven_type, runtime_args) values " +
- "(#{jobId}, #{jobName}, #{period}, #{workflowName}, " +
- "#{dataRangeStart}, #{dataRangeEnd}, current_timestamp(), current_timestamp(), " +
- "#{status}, current_timestamp(), current_timestamp(), #{file}, #{applicationId}, #{projectName}, #{loadType}, #{logDrivenType}, #{runtimeArgs})"
- ))
- def createJobLog(jobLog: JobLog): Unit
-
- @Update(Array(
- "update sharp_etl.job_log set " +
- "workflow_name = #{workflowName}, " +
- "`period` = #{period}, " +
- "job_name = #{jobName}, " +
- "data_range_start = #{dataRangeStart}, " +
- "data_range_end = #{dataRangeEnd}, " +
- "job_start_time = #{jobStartTime}, " +
- "job_end_time = #{jobEndTime}, " +
- "status = #{status}, " +
- "create_time = #{createTime}, " +
- "last_update_time = #{lastUpdateTime}, " +
- "file = #{file}, " +
- "application_id = #{applicationId}, " +
- "load_type = #{loadType}, " +
- "log_driven_type = #{logDrivenType}, " +
- "project_name = #{projectName}, " +
- "runtime_args = #{runtimeArgs} " +
- "where job_id = #{jobId}"
- ))
- def updateJobLog(jobLog: JobLog): Unit
-
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where workflow_name = #{workflowName} and job_start_time < #{jobStartTime} order by job_start_time desc limit 1"
- ))
- def lastJobLog(@Param("workflowName") workflowName: String, @Param("jobStartTime") jobStartTime: String): JobLog
-
- @Select(Array(
- "select *" +
- " from sharp_etl.job_log where status='SUCCESS' and workflow_name = #{upstreamWFName} and job_id > #{upstreamLogId} order by job_id"
- ))
- def unprocessedUpstreamJobLog(@Param("upstreamWFName") upstreamWFName: String, @Param("upstreamLogId") upstreamLogId: String): Array[JobLog]
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/QualityCheckLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/QualityCheckLogMapper.scala
deleted file mode 100644
index c3f8b9c..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/QualityCheckLogMapper.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.spark
-
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-import org.apache.ibatis.annotations.Insert
-
-trait QualityCheckLogMapper {
- @Insert(Array(
- "insert into sharp_etl.quality_check_log(job_id, job_name, `column`, data_check_type, ids, error_type, warn_count, " +
- "error_count, create_time, last_update_time)",
- "values ",
- "(#{jobId}, #{jobName}, #{column}, #{dataCheckType}, #{ids}, #{errorType}, #{warnCount}, #{errorCount}, #{createTime}, #{lastUpdateTime})"
- ))
- def create(jobError: QualityCheckLog): Unit
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/StepLogMapper.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/StepLogMapper.scala
deleted file mode 100644
index 69a3390..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mapper/spark/StepLogMapper.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mapper.spark
-
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import org.apache.ibatis.annotations._
-
-trait StepLogMapper extends Serializable {
-
- @Insert(Array("insert into sharp_etl.step_log(" +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- ") values (" +
- "#{jobId}, #{stepId}, #{status}, current_timestamp(), current_timestamp(), #{duration}, #{output}, #{error}, " +
- "#{sourceCount}, #{targetCount}, #{successCount}, #{failureCount}, #{sourceType}, #{targetType}" +
- ")"
- ))
- def createStepLog(stepLog: StepLog): Unit
-
- @Update(Array(
- "update sharp_etl.step_log set " +
- "status = #{status}, " +
- "start_time = #{startTime}, " +
- "end_time = current_timestamp(), " +
- "duration = #{duration}, " +
- "output = #{output}, " +
- "error = #{error}, " +
- "source_count = #{sourceCount}, " +
- "target_count = #{targetCount}, " +
- "success_count = #{successCount}, " +
- "failure_count = #{failureCount}, " +
- "source_type = #{sourceType}, " +
- "target_type = #{targetType} " +
- "where job_id = #{jobId} and step_id = #{stepId}"
- ))
- def updateStepLog(stepLog: StepLog): Unit
-
- @Select(Array(
- "select " +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- " from sharp_etl.step_log where job_id = #{jobId}"
- ))
- def stepLogs(jobId: Long): Array[StepLog]
-
- @Select(Array(
- "select " +
- "job_id, step_id, status, start_time, end_time, duration, output, error, " +
- "source_count, target_count, success_count, failure_count, source_type, target_type" +
- " from sharp_etl.step_log where job_id in" +
- " (select job_id from sharp_etl.job_log " +
- " where job_start_time >= #{startTime}" +
- " and job_start_time < #{endTime})"
- ))
- def stepLogsBetween(@Param("startTime") startTime: String, @Param("endTime") endTime: String): Array[StepLog]
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/JobLog.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/JobLog.scala
deleted file mode 100644
index 61cdcd0..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/JobLog.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.model
-
-import com.github.sharpdata.sharpetl.core.repository.model.JobStatus.{FAILURE, SUCCESS}
-
-import java.time.LocalDateTime
-import scala.beans.BeanProperty
-import scala.collection.mutable
-
-object JobStatus {
- val SUCCESS = "SUCCESS"
- val FAILURE = "FAILURE"
- val RUNNING = "RUNNING"
-}
-
-object Constants {
- val NULL_DATETIME: LocalDateTime = null // scalastyle:ignore
- val NULL_INTEGER: Int = 0
-}
-
-
-class JobLog(
- @BeanProperty
- var jobId: String,
- @BeanProperty
- var workflowName: String,
- @BeanProperty
- var period: Int,
- @BeanProperty
- var jobName: String,
- @BeanProperty
- var dataRangeStart: String,
- @BeanProperty
- var dataRangeEnd: String,
- @BeanProperty
- var jobStartTime: LocalDateTime,
- @BeanProperty
- var jobEndTime: LocalDateTime,
- @BeanProperty
- var status: String,
- @BeanProperty
- var createTime: LocalDateTime,
- @BeanProperty
- var lastUpdateTime: LocalDateTime,
- @BeanProperty
- var loadType: String,
- @BeanProperty
- var logDrivenType: String,
- @BeanProperty
- var file: String,
- @BeanProperty
- var applicationId: String,
- @BeanProperty
- var projectName: String,
- @BeanProperty
- var runtimeArgs: String
- ) extends Serializable {
- private val stepLogs: mutable.Map[String, StepLog] = mutable.Map()
-
- def failed(): Unit = {
- status = FAILURE
- }
-
- def success(): Unit = {
- status = SUCCESS
- }
-
- def getStepLog(stepId: String): StepLog = {
- stepLogs.getOrElseUpdate(stepId, createStepLog(stepId))
- }
-
- def setStepLogs(stepLogs: Array[StepLog]): Unit = {
- for (elem <- stepLogs) {
- this.stepLogs(elem.stepId) = elem
- }
- }
-
- def getStepLogs(): Array[StepLog] = {
- stepLogs.values.toArray.sortBy(_.stepId)
- }
-
- def createStepLog(stepId: String): StepLog = {
- val stepLog = new StepLog(
- jobId = this.jobId,
- stepId = stepId,
- status = JobStatus.RUNNING,
- startTime = LocalDateTime.now(),
- endTime = Constants.NULL_DATETIME,
- duration = Constants.NULL_INTEGER,
- output = "",
- error = "",
- sourceCount = Constants.NULL_INTEGER,
- targetCount = Constants.NULL_INTEGER,
- successCount = Constants.NULL_INTEGER,
- failureCount = Constants.NULL_INTEGER,
- sourceType = "",
- targetType = ""
- )
- stepLogs(stepId) = stepLog
- stepLog
- }
-
-}
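
To illustrate the lifecycle this model encodes (all values below are made up): a `JobLog` starts `RUNNING`, lazily materialises one `StepLog` per step id, and flips status through `success()`/`failed()`:

```scala
import java.time.LocalDateTime

import com.github.sharpdata.sharpetl.core.repository.model.{JobLog, JobStatus}

object JobLogSketch extends App {
  val jobLog = new JobLog(
    jobId = "42", workflowName = "sales_daily", period = 1440, jobName = "sales_daily-1440",
    dataRangeStart = "2023-01-01 00:00:00", dataRangeEnd = "2023-01-02 00:00:00",
    jobStartTime = LocalDateTime.now(), jobEndTime = null,
    status = JobStatus.RUNNING, createTime = LocalDateTime.now(), lastUpdateTime = LocalDateTime.now(),
    loadType = "incremental", logDrivenType = "timewindow", file = "sales_daily.sql",
    applicationId = "", projectName = "default", runtimeArgs = ""
  )
  val step = jobLog.getStepLog("1") // created on first access, status RUNNING
  step.success()                    // stamps end time and duration
  jobLog.success()
  assert(jobLog.getStepLogs().length == 1)
}
```
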
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/QualityCheckLog.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/QualityCheckLog.scala
deleted file mode 100644
index e9c7f95..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/QualityCheckLog.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.model
-
-import com.github.sharpdata.sharpetl.core.util.StringUtil.uuid
-
-import java.time.LocalDateTime
-import scala.beans.BeanProperty
-
-//noinspection ScalaStyle
-case class QualityCheckLog(
- @BeanProperty
- var jobId: String,
- @BeanProperty
- var jobName: String,
- @BeanProperty
- var column: String,
- @BeanProperty
- var dataCheckType: String,
- @BeanProperty
- var ids: String,
- @BeanProperty
- var errorType: String,
- @BeanProperty
- var warnCount: Long,
- @BeanProperty
- var errorCount: Long,
- @BeanProperty
- var createTime: LocalDateTime = LocalDateTime.now(),
- @BeanProperty
- var lastUpdateTime: LocalDateTime = LocalDateTime.now(),
- @BeanProperty
- var id: String = uuid
- )
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/StepLog.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/StepLog.scala
deleted file mode 100644
index acc0580..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/model/StepLog.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.model
-
-import com.github.sharpdata.sharpetl.core.exception.Exception.throwableAsString
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog.ERROR_DEFAULT_TRUNCATION
-import org.apache.log4j.Logger
-
-import java.lang.reflect.InvocationTargetException
-import java.time.LocalDateTime
-import java.time.temporal.ChronoUnit
-import scala.beans.BeanProperty
-
-object StepStatus {
- val SUCCESS = "SUCCESS"
- val FAILURE = "FAILURE"
- val RUNNING = "RUNNING"
-}
-
-class StepLog(
- @BeanProperty
- var jobId: String,
- @BeanProperty
- var stepId: String,
- @BeanProperty
- var status: String,
- @BeanProperty
- var startTime: LocalDateTime,
- @BeanProperty
- var endTime: LocalDateTime,
- @BeanProperty
- var duration: Int,
- @BeanProperty
- var output: String,
- @BeanProperty
- var error: String,
- @BeanProperty
- var sourceCount: Integer,
- @BeanProperty
- var targetCount: Integer,
- @BeanProperty
- var successCount: Integer,
- @BeanProperty
- var failureCount: Integer,
- @BeanProperty
- var sourceType: String,
- @BeanProperty
- var targetType: String) {
- val logger: Logger = Logger.getLogger("ETLLogger")
-
- def failed(errorMsg: String): Unit = {
- error(errorMsg)
- status = StepStatus.FAILURE
- endTime = LocalDateTime.now()
- duration = startTime.until(endTime, ChronoUnit.SECONDS).toInt
- }
-
- def failed(error: Throwable): Unit = {
- val prefix = error.getClass.getName + ": "
- val message =
- if (error.getCause != null && error.getCause.isInstanceOf[InvocationTargetException]) {
- // get transformer error
- val e = error.getCause.asInstanceOf[InvocationTargetException]
- e.getCause.getMessage
- } else if (error.getMessage != null && error.getMessage.nonEmpty) {
- error.getMessage
- } else if (error.getCause != null) {
- error.getCause.getMessage
- } else {
- throwableAsString(error)
- }
- failed(prefix + message)
- }
-
- def success(): Unit = {
- status = StepStatus.SUCCESS
- endTime = LocalDateTime.now()
- duration = startTime.until(endTime, ChronoUnit.SECONDS).toInt
- }
-
- def error(str: String): Unit = {
- if (str != null && str.nonEmpty) {
- error = error + "\n[error] " + str.take(ERROR_DEFAULT_TRUNCATION)
- logger.error(str)
- }
- }
-
- def info(str: String): Unit = {
- output = output + "\n[info] " + str.take(ERROR_DEFAULT_TRUNCATION)
- logger.info(str)
- }
-
-}
-
-object StepLog {
- val ERROR_DEFAULT_TRUNCATION = 5000
-}
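
A small sketch of the failure path: `failed(Throwable)` prefixes the exception class name, unwraps transformer `InvocationTargetException`s, truncates the message to `ERROR_DEFAULT_TRUNCATION` characters, and stamps the duration:

```scala
import java.time.LocalDateTime

import com.github.sharpdata.sharpetl.core.repository.model.{StepLog, StepStatus}

object StepLogSketch extends App {
  val step = new StepLog("42", "1", StepStatus.RUNNING, LocalDateTime.now(), null,
    0, "", "", 0, 0, 0, 0, "", "")
  step.failed(new RuntimeException("boom"))
  assert(step.status == StepStatus.FAILURE)
  assert(step.error.startsWith("\n[error] java.lang.RuntimeException: boom"))
  assert(step.duration >= 0)
}
```
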
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/JobLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/JobLogAccessor.scala
deleted file mode 100644
index ff09362..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/JobLogAccessor.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mssql
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.mssql
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-
-import java.time.LocalDateTime
-import java.time.LocalDateTime.now
-
-class JobLogAccessor() extends repository.JobLogAccessor() {
-
-
- def lastSuccessExecuted(workflowName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.lastSuccessExecuted(workflowName)
- })
- }
-
- override def lastExecuted(workflowName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.lastExecuted(workflowName)
- })
- }
-
- def isAnotherJobRunning(jobName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.isAnotherJobRunning(jobName)
- })
- }
-
- override def create(jobLog: JobLog): Unit = {
- super.create(jobLog)
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.createJobLog(jobLog)
- jobLog
- })
- }
-
- override def update(jobLog: JobLog): Unit = {
- super.update(jobLog)
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.updateJobLog(jobLog)
- jobLog
- })
- }
-
- override def updateStatus(jobLog: JobLog): Unit = {
- super.updateStatus(jobLog)
- update(jobLog)
- }
-
- override def getLatestSuccessJobLogByNames(wfNames: Array[String]): Array[JobLog] = {
- wfNames.map(name => {
- this.lastSuccessExecuted(name)
- }).filterNot(_ == null)
- }
-
- override def executionsLastYear(workflowName: String): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.executionsLastYear(workflowName, now().minusYears(1L).format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def executionsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.executionsBetween(startTime.format(L_YYYY_MM_DD_HH_MM_SS), endTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def getPreviousJobLog(jobLog: JobLog): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.lastJobLog(jobLog.workflowName, jobLog.jobStartTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def getUnprocessedUpstreamJobLog(upstreamWFName: String, upstreamLogId: BigInt): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.JobLogMapper])
- mapper.unprocessedUpstreamJobLog(upstreamWFName, upstreamLogId.toString())
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/QualityCheckAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/QualityCheckAccessor.scala
deleted file mode 100644
index 6997140..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/QualityCheckAccessor.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mssql
-
-import com.github.sharpdata.sharpetl.core.repository.mapper.mssql.QualityCheckLogMapper
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.mssql
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-
-class QualityCheckAccessor() extends repository.QualityCheckAccessor() {
- def create(log: QualityCheckLog): Unit = {
- execute[QualityCheckLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[QualityCheckLogMapper])
- mapper.create(log)
- log
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/StepLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/StepLogAccessor.scala
deleted file mode 100644
index 876703b..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mssql/StepLogAccessor.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mssql
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.mssql
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-
-import java.time.LocalDateTime
-
-class StepLogAccessor() extends repository.StepLogAccessor() {
-
- def create(stepLog: StepLog): Unit = {
- execute[StepLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.StepLogMapper])
- mapper.createStepLog(stepLog)
- stepLog
- })
- }
-
- def update(stepLog: StepLog): Unit = {
- execute[StepLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.StepLogMapper])
- mapper.updateStepLog(stepLog)
- stepLog
- })
- }
-
- def stepLogs(jobId: Long): Array[StepLog] = {
- execute[Array[StepLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.StepLogMapper])
- mapper.stepLogs(jobId)
- })
- }
-
- def stepLogsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[StepLog] = {
- execute[Array[StepLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mssql.StepLogMapper])
- mapper.stepLogsBetween(startTime.format(L_YYYY_MM_DD_HH_MM_SS), endTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/JobLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/JobLogAccessor.scala
deleted file mode 100644
index edf4014..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/JobLogAccessor.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mysql
-
-import com.github.sharpdata.sharpetl.core.repository.mapper.mysql.JobLogMapper
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-
-import java.time.LocalDateTime
-import java.time.LocalDateTime.now
-
-class JobLogAccessor() extends repository.JobLogAccessor() {
-
-
- def lastSuccessExecuted(workflowName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastSuccessExecuted(workflowName)
- })
- }
-
- override def lastExecuted(workflowName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastExecuted(workflowName)
- })
- }
-
- def isAnotherJobRunning(jobName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.isAnotherJobRunning(jobName)
- })
- }
-
- override def create(jobLog: JobLog): Unit = {
- super.create(jobLog)
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.createJobLog(jobLog)
- jobLog
- })
- }
-
- override def update(jobLog: JobLog): Unit = {
- super.update(jobLog)
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.updateJobLog(jobLog)
- jobLog
- })
- }
-
- override def updateStatus(jobLog: JobLog): Unit = {
- super.updateStatus(jobLog)
- update(jobLog)
- }
-
- override def getLatestSuccessJobLogByNames(wfNames: Array[String]): Array[JobLog] = {
- wfNames.map(name => {
- this.lastSuccessExecuted(name)
- }).filterNot(_ == null)
- }
-
- override def executionsLastYear(workflowName: String): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.executionsLastYear(workflowName, now().minusYears(1L).format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def executionsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.executionsBetween(startTime.format(L_YYYY_MM_DD_HH_MM_SS), endTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def getPreviousJobLog(jobLog: JobLog): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastJobLog(jobLog.workflowName, jobLog.jobStartTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def getUnprocessedUpstreamJobLog(upstreamWFName: String, upstreamLogId: BigInt): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.unprocessedUpstreamJobLog(upstreamWFName, upstreamLogId.toString())
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/QualityCheckAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/QualityCheckAccessor.scala
deleted file mode 100644
index 65d57fc..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/QualityCheckAccessor.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mysql
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.mysql
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-
-class QualityCheckAccessor() extends repository.QualityCheckAccessor() {
- def create(log: QualityCheckLog): Unit = {
- execute[QualityCheckLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[mysql.QualityCheckLogMapper])
- mapper.create(log)
- log
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/StepLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/StepLogAccessor.scala
deleted file mode 100644
index 9332736..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/mysql/StepLogAccessor.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.mysql
-
-import com.github.sharpdata.sharpetl.core.repository.mapper.mysql.StepLogMapper
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-
-import java.time.LocalDateTime
-
-class StepLogAccessor() extends repository.StepLogAccessor() {
-
- def create(stepLog: StepLog): Unit = {
- execute[StepLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.createStepLog(stepLog)
- stepLog
- })
- }
-
- def update(stepLog: StepLog): Unit = {
- execute[StepLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.updateStepLog(stepLog)
- stepLog
- })
- }
-
- def stepLogs(jobId: Long): Array[StepLog] = {
- execute[Array[StepLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.stepLogs(jobId)
- })
- }
-
- def stepLogsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[StepLog] = {
- execute[Array[StepLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.stepLogsBetween(startTime.format(L_YYYY_MM_DD_HH_MM_SS), endTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/JobLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/JobLogAccessor.scala
deleted file mode 100644
index 6757639..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/JobLogAccessor.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.spark
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.spark.JobLogMapper
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-
-import java.time.LocalDateTime
-import java.time.LocalDateTime.now
-
-class JobLogAccessor() extends repository.JobLogAccessor() {
-
-
- def lastSuccessExecuted(workflowName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastSuccessExecuted(workflowName)
- })
- }
-
- override def lastExecuted(workflowName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastExecuted(workflowName)
- })
- }
-
- def isAnotherJobRunning(jobName: String): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.isAnotherJobRunning(jobName)
- })
- }
-
- override def create(jobLog: JobLog): Unit = {
- super.create(jobLog)
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.createJobLog(jobLog)
- jobLog
- })
- }
-
- override def update(jobLog: JobLog): Unit = {
- super.update(jobLog)
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.updateJobLog(jobLog)
- jobLog
- })
- }
-
- override def updateStatus(jobLog: JobLog): Unit = {
- super.updateStatus(jobLog)
- update(jobLog)
- }
-
- override def getLatestSuccessJobLogByNames(wfNames: Array[String]): Array[JobLog] = {
- wfNames.map(name => {
- this.lastSuccessExecuted(name)
- }).filterNot(_ == null)
- }
-
- override def executionsLastYear(workflowName: String): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.executionsLastYear(workflowName, now().minusYears(1L).format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def executionsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.executionsBetween(startTime.format(L_YYYY_MM_DD_HH_MM_SS), endTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def getPreviousJobLog(jobLog: JobLog): JobLog = {
- execute[JobLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.lastJobLog(jobLog.workflowName, jobLog.jobStartTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-
- override def getUnprocessedUpstreamJobLog(upstreamWFName: String, upstreamLogId: BigInt): Array[JobLog] = {
- execute[Array[JobLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[JobLogMapper])
- mapper.unprocessedUpstreamJobLog(upstreamWFName, upstreamLogId.toString())
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/QualityCheckAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/QualityCheckAccessor.scala
deleted file mode 100644
index 14415f4..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/QualityCheckAccessor.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.spark
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.spark
-import com.github.sharpdata.sharpetl.core.repository.model.QualityCheckLog
-
-class QualityCheckAccessor() extends repository.QualityCheckAccessor() {
- def create(log: QualityCheckLog): Unit = {
- execute[QualityCheckLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[spark.QualityCheckLogMapper])
- mapper.create(log)
- log
- })
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/StepLogAccessor.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/StepLogAccessor.scala
deleted file mode 100644
index d5f292b..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/repository/spark/StepLogAccessor.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package com.github.sharpdata.sharpetl.core.repository.spark
-
-import com.github.sharpdata.sharpetl.core.repository
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession.execute
-import com.github.sharpdata.sharpetl.core.repository.mapper.spark.StepLogMapper
-import com.github.sharpdata.sharpetl.core.repository.model.StepLog
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-
-import java.time.LocalDateTime
-
-class StepLogAccessor() extends repository.StepLogAccessor() {
-
- def create(stepLog: StepLog): Unit = {
- execute[StepLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.createStepLog(stepLog)
- stepLog
- })
- }
-
- def update(stepLog: StepLog): Unit = {
- execute[StepLog](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.updateStepLog(stepLog)
- stepLog
- })
- }
-
- def stepLogs(jobId: Long): Array[StepLog] = {
- execute[Array[StepLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.stepLogs(jobId)
- })
- }
-
- def stepLogsBetween(startTime: LocalDateTime, endTime: LocalDateTime): Array[StepLog] = {
- execute[Array[StepLog]](sessionValue => {
- val mapper = sessionValue.getMapper(classOf[StepLogMapper])
- mapper.stepLogsBetween(startTime.format(L_YYYY_MM_DD_HH_MM_SS), endTime.format(L_YYYY_MM_DD_HH_MM_SS))
- })
- }
-}
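
Every accessor method follows the same template: open a session through `MyBatisSession.execute`, resolve the dialect's mapper, delegate, return. A usage sketch against the Spark-dialect accessor (assumes the MyBatis session is already configured; the time window is illustrative):

```scala
import java.time.LocalDateTime

import com.github.sharpdata.sharpetl.core.repository.spark.StepLogAccessor

object StepLogQuerySketch extends App {
  val accessor = new StepLogAccessor()
  val now = LocalDateTime.now()
  // Fetch step logs for all jobs started in the last 24 hours.
  val recent = accessor.stepLogsBetween(now.minusDays(1), now)
  recent.foreach(s => println(s"${s.jobId}/${s.stepId}: ${s.status} in ${s.duration}s"))
}
```
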
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/AST.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/AST.scala
deleted file mode 100644
index b2a3830..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/AST.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.github.sharpdata.sharpetl.core.syntax
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations._
-import com.github.sharpdata.sharpetl.core.notification.NotifyConfig
-
-@Evolving(since = "1.0.0")
-final case class Notify(notifyType: String, recipients: String, notifyCondition: String) {
- def toConfigs(): Seq[NotifyConfig] = {
- recipients
- .split(",")
- .map(_.trim)
- .map(recipient => NotifyConfig(notifyType, recipient, notifyCondition))
- }
-}
-
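`toConfigs()` fans one `Notify` line out into a `NotifyConfig` per trimmed recipient, for example:

```scala
import com.github.sharpdata.sharpetl.core.syntax.Notify

object NotifySketch extends App {
  val notify = Notify("email", "alice@example.com, bob@example.com", "failure")
  // Yields one NotifyConfig("email", recipient, "failure") per recipient.
  notify.toConfigs().foreach(println)
}
```
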
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/Formatable.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/Formatable.scala
deleted file mode 100644
index 0620488..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/Formatable.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-package com.github.sharpdata.sharpetl.core.syntax
-
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import org.apache.commons.lang3.reflect.FieldUtils
-
-import scala.collection.mutable
-import scala.jdk.CollectionConverters._
-
-@Stable(since = "1.0.0")
-class Formatable extends Serializable {
-
- override def toString: String = format(this)
-
- // scalastyle:off
- final def format(obj: Any): String = {
- obj match {
- case value: String => value
- case _ =>
- FieldUtils
- .getAllFieldsList(obj.getClass)
- .asScala
- .filterNot(it => it.getName.contains("$init$") || it.getName.contains("$outer") || it.getName.contains("$jacoco"))
- .map(field => {
- field.setAccessible(true)
- val value = field.get(this)
- value match {
- case null => ""
- case _: String => s"-- ${field.getName}=$value"
- case _: Integer => s"-- ${field.getName}=$value"
- case args: mutable.Map[String, Any] =>
- if (args.nonEmpty) {
- (List("-- args") ++ args.map { case (key, value) => s"-- $key=$value" }).mkString("\n")
- } else ""
- case options: Map[String, String] =>
- if (options.nonEmpty) {
- (List("-- options") ++ options.map { case (key, value) => s"-- $key=$value" }).mkString("\n")
- } else ""
- case _ => s"-- ${field.getName}\n" + value.toString.replaceAll("-- ", "-- ")
- }
- }).filterNot(isNullOrEmpty).mkString("\n")
- }
- }
- // scalastyle:on
-}
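
Any config class extending `Formatable` gets a `-- key=value` rendering of its fields via reflection. A hypothetical subclass (not one of the project's real config types) shows the output shape:

```scala
import scala.beans.BeanProperty

import com.github.sharpdata.sharpetl.core.syntax.Formatable

class DemoConfig extends Formatable {
  @BeanProperty var dataSourceType: String = "hive"
  @BeanProperty var tableName: String = "ods.sales"
}

object FormatableSketch extends App {
  println(new DemoConfig)
  // prints:
  // -- dataSourceType=hive
  // -- tableName=ods.sales
}
```
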
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WFParseResult.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WFParseResult.scala
deleted file mode 100644
index ae3aca9..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WFParseResult.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.github.sharpdata.sharpetl.core.syntax
-
-import com.github.sharpdata.sharpetl.core.exception.Exception.WorkFlowSyntaxException
-import fastparse.{IndexedParserInput, Parsed, ParserInput}
-
-
-sealed trait WFParseResult {
- def isSuccess: Boolean
-
- def get: Workflow
-}
-
-case class WFParseSuccess(wf: Workflow) extends WFParseResult {
- override def isSuccess: Boolean = true
-
- override def get: Workflow = wf
-}
-
-case class WFParseFail(parsed: Parsed.Failure) extends WFParseResult {
- override def toString: String = {
- parsed match {
- case Parsed.Failure(label, failIndex, extra) =>
- val trace = extra.trace()
- val last = trace.stack.last
- val input: ParserInput = trace.input
- val pair = input.prettyIndex(last._2).split(":")
- val row = pair.head.toInt
- val col = pair.tail.head.toInt
-
- val line = {
- val lines = trace.input.asInstanceOf[IndexedParserInput].data.split("\n")
- if (lines.size <= row) {
- lines.last
- } else {
- lines(row - 1)
- }
- }
- val offending =
- s"${row.toString map { _ => ' ' }}|${" " * (col - 1)}^"
- s"""$row:$col: error: ${description(input, trace.stack, failIndex)}
- |$row|$line
- |$offending""".stripMargin
- }
- }
-
- def description(input: ParserInput, stack: List[(String, Int)], index: Int): String = {
- s"""Expected parse by `${stack.reverse.head._1}` at ${input.prettyIndex(stack.reverse.head._2)}, but found ${formatTrailing(input, index)}.
- |Parse stack is ${formatStack(input, stack)}""".stripMargin
- }
-
- def formatStack(input: ParserInput, stack: List[(String, Int)]): String = {
- stack.map { case (s, i) => s"$s:${input.prettyIndex(i)}" }.mkString(" / ")
- }
-
- def formatTrailing(input: ParserInput, index: Int): String = {
- fastparse.internal.Util.literalize(input.slice(index, index + 10))
- }
-
- override def isSuccess: Boolean = false
-
- override def get: Workflow = throw WorkFlowSyntaxException(this.toString)
-}
-
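`WFParseFail` turns a fastparse failure into a compiler-style `row:col` message with a caret under the offending column. A sketch with a toy parser (the grammar here is made up, not the project's workflow grammar):

```scala
import fastparse._
import NoWhitespace._

import com.github.sharpdata.sharpetl.core.syntax.WFParseFail

object ParseFailSketch extends App {
  def header[$: P]: P[Unit] = P("-- workflow=" ~ CharIn("a-z").rep(1))

  parse("-- workflw=oops", header(_)) match {
    case f: Parsed.Failure => println(WFParseFail(f)) // e.g. "1:1: error: Expected parse by `header` ..."
    case _                 => ()
  }
}
```
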
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/Workflow.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/Workflow.scala
deleted file mode 100644
index cbb29f3..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/Workflow.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package com.github.sharpdata.sharpetl.core.syntax
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Evolving
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.StringUtil
-
-@Evolving(since = "1.0.0")
-final case class Workflow(
- name: String,
- period: String,
- loadType: String,
- logDrivenType: String,
- upstream: String,
- dependsOn: String,
- comment: String,
- timeout: Int,
- defaultStart: String,
- stopScheduleWhenFail: Boolean,
- notifies: Seq[Notify],
- options: Map[String, String],
- var steps: List[WorkflowStep]
- ) extends Formatable {
- def getProjectName(): String = Option(options).map(_.getOrElse("projectName", "default")).getOrElse("default")
-
- // scalastyle:off
- override def toString: String = {
- val builder = new StringBuilder()
- builder.append(headerStr)
- builder.append(steps.mkString("\n"))
- builder.toString()
- }
-
- def headerStr: String = {
- val builder = new StringBuilder()
- builder.append(s"-- workflow=$name$ENTER")
- if (!StringUtil.isNullOrEmpty(period)) builder.append(s"-- period=$period$ENTER")
- if (!StringUtil.isNullOrEmpty(loadType)) builder.append(s"-- loadType=$loadType$ENTER")
- if (!StringUtil.isNullOrEmpty(logDrivenType)) builder.append(s"-- logDrivenType=$logDrivenType$ENTER")
- if (!StringUtil.isNullOrEmpty(upstream)) builder.append(s"-- upstream=$upstream$ENTER")
- if (!StringUtil.isNullOrEmpty(dependsOn)) builder.append(s"-- dependsOn=$dependsOn$ENTER")
- if (!StringUtil.isNullOrEmpty(comment)) builder.append(s"-- comment=$comment$ENTER")
- if (!StringUtil.isNullOrEmpty(defaultStart)) builder.append(s"-- defaultStart=$defaultStart$ENTER")
- if (timeout > 1) builder.append(s"-- timeout=$timeout$ENTER")
- if (stopScheduleWhenFail) builder.append(s"-- stopScheduleWhenFail=$stopScheduleWhenFail$ENTER")
- if (notifies != null && notifies.nonEmpty) {
- notifies.foreach { notify =>
- builder.append(s"-- notify$ENTER")
- builder.append(s"-- notifyType=${notify.notifyType}$ENTER")
- builder.append(s"-- recipients=${notify.recipients}$ENTER")
- builder.append(s"-- notifyCondition=${notify.notifyCondition}$ENTER")
- }
- }
- builder.append(optionsToString)
- builder.append("\n")
- builder.toString()
- }
-
- def optionsToString: String = {
- if (options != null && options.nonEmpty) {
- val builder = new StringBuilder()
- builder.append(s"-- options$ENTER")
- options.foreach { case (key, value) => builder.append(s"-- $key=$value$ENTER") }
- builder.toString()
- } else {
- ""
- }
- }
-
- // scalastyle:on
-}
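
For reference, a sketch of what headerStr emits; the values below are hypothetical, and empty fields are skipped while options are flattened into `-- key=value` comment lines.

    val wf = Workflow(
      name = "sales_daily", period = "1440", loadType = "incremental",
      logDrivenType = "timewindow", upstream = "", dependsOn = "", comment = "",
      timeout = 0, defaultStart = "", stopScheduleWhenFail = false,
      notifies = Seq.empty, options = Map("projectName" -> "demo"), steps = Nil)

    print(wf.headerStr)
    // -- workflow=sales_daily
    // -- period=1440
    // -- loadType=incremental
    // -- logDrivenType=timewindow
    // -- options
    // -- projectName=demo
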
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowParser.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowParser.scala
deleted file mode 100644
index 4235177..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowParser.scala
+++ /dev/null
@@ -1,180 +0,0 @@
-package com.github.sharpdata.sharpetl.core.syntax
-
-import fastparse._
-import NoWhitespace._
-import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
-import com.github.sharpdata.sharpetl.core.datasource.config.{DataSourceConfig, TransformationDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.annotation.AnnotationScanner.{configRegister, defaultConfigType, tempConfig}
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Experimental
-import com.github.sharpdata.sharpetl.core.syntax.ParserUtils.{Until, objectMapper, trimSql}
-
-
-object WorkflowParser {
-
- def key[$: P]: P0 = P(CharIn("a-z", "A-Z", "0-9", "_", "."))
-
- def singleLineValue[$: P]: P[String] = Until(newline | End).!
-
- def multiLineValue[$: P](indent: Int): P[String] =
- (P("|") ~/ newline ~ Until((anyComment ~ key) | End))
- .map(value => {
- val replace = multiLineStart(indent)
- value.split("\n").map(_.replace(replace, "")).mkString("\n")
- })
-
- private def multiLineStart(indent: Int) = {
- "--" + Range.apply(0, indent).map(_ => " ").mkString + "|"
- }
-
- def newline[$: P]: P0 = P("\n" | "\r\n" | "\r" | "\f")
-
- def newlines[$: P]: P0 = newline.rep
-
- def whitespace[$: P]: P0 = P(" " | "\t" | newline)
-
- def comment[$: P](indent: Int): P0 = P("--" ~ " ".rep(exactly = indent)) ~ !" "
-
- def anyComment[$: P]: P0 = P("--" ~ " ".rep)
-
- def otherPart[$: P]: P0 = P("step=" | "source=" | "target=" | "args" ~ newline | "options" ~ newline | "conf" ~ newline | "loopOver=")
-
- def keyValPair[$: P](indent: Int): P[(String, String)] =
- comment(indent) ~ !otherPart ~ P(key.rep(1).!) ~ "=" ~ P(multiLineValue(indent) | singleLineValue)
-
- def keyValPairs[$: P](indent: Int): P[Seq[(String, String)]] = keyValPair(indent).rep(sep = newlines)
-
- def stepHeader[$: P]: P0 = P("-- step=")
-
- def sql[$: P]: P[String] = Until(stepHeader | End)
-
- def nestedObj[$: P](objName: String, indent: Int): P[Map[String, String]] = P(
- comment(indent) ~ P(objName) ~ newlines
- ~ keyValPairs(indent + 1)
- ).map(_.toMap)
-
- def notifies[$: P](indent: Int): P[Seq[Map[String, String]]] = notify(indent).rep(sep = newlines)
-
- def notify[$: P](indent: Int): P[Map[String, String]] = nestedObj("notify", indent)
-
- def options[$: P](indent: Int): P[Map[String, String]] = nestedObj("options", indent)
-
- def conf[$: P](indent: Int): P[Map[String, String]] = nestedObj("conf", indent)
-
- def loopOver[$: P]: P[String] = P("-- loopOver=") ~ singleLineValue.!
-
- def dataSource[$: P](`type`: String): P[DataSourceConfig] = P(
- s"-- ${`type`}=" ~/ key.rep.! ~ newlines
- ~ keyValPairs(2) ~ newlines
- ~ options(2).?
- ).map {
- case (typeValue, kv, options) =>
- val value = Map(
- "dataSourceType" -> typeValue.toLowerCase
- ) ++ kv.toMap ++ Map("options" -> options.getOrElse(Map()))
- val clazz: Class[DataSourceConfig] = configRegister.getOrElse(typeValue.toLowerCase, defaultConfigType)
- val json = objectMapper.writeValueAsString(value)
- objectMapper.readValue(json, clazz)
- }
-
- def args[$: P]: P[(String, String)] = P("--" ~ " ".rep(min = 2, max = 3)) ~ !otherPart ~ P(key.rep(1).!) ~ "=" ~ singleLineValue.!
-
- def transformer[$: P](`type`: String): P[DataSourceConfig] = P(
- s"-- ${`type`}=transformation" ~/ newlines
- ~ args.rep(sep = newlines)
- ).map { kv =>
- val map = kv.toMap
- val value = Map(
- "dataSourceType" -> "transformation",
- "className" -> map.getOrElse("className", ""),
- "methodName" -> map.getOrElse("methodName", ""),
- "transformerType" -> map.getOrElse("transformerType", "")
- ) ++ Map("args" -> map.filterKeys(it => it != "className" && it != "methodName" && it != "transformerType" && it != "dataSourceType").toMap)
- val json = objectMapper.writeValueAsString(value)
- objectMapper.readValue(json, classOf[TransformationDataSourceConfig])
- }
-
- def steps[$: P]: P[Seq[WorkflowStep]] = step.rep(sep = newlines, min = 1)
-
- def step[$: P]: P[WorkflowStep] = P(
- newlines ~ stepHeader ~ singleLineValue ~ newlines
- ~ P(transformer("source") | dataSource("source")).? ~ newlines
- ~ P(transformer("target") | dataSource("target")) ~ newlines
- ~ keyValPairs(1).? ~ newlines
- ~ conf(1).? ~ newlines
- ~ loopOver.? ~ newlines
- ~ sql
- ).map {
- // scalastyle:off
- case (step, sourceOptional, target, kv, conf, loopOverOptional, sql) =>
- val map = kv.getOrElse(Seq()).toMap
- val workflowStep = new WorkflowStep
- workflowStep.step = step
- workflowStep.source = sourceOptional.getOrElse(tempConfig)
- workflowStep.target = target
- workflowStep.sqlTemplate = trimSql(sql)
- workflowStep.persist = map.getOrElse("persist", null)
- workflowStep.checkPoint = map.getOrElse("checkPoint", null)
- workflowStep.writeMode = map.getOrElse("writeMode", null)
- workflowStep.skipFollowStepWhenEmpty = map.getOrElse("skipFollowStepWhenEmpty", null) //TODO: drop this later
- workflowStep.conf = conf.getOrElse(Map())
- workflowStep.loopOver = loopOverOptional.orNull
- workflowStep
- // WorkflowStep(step, source, target, sql.map(_.trim),
- // map.getOrElse("persist", null), map.getOrElse("checkpoint", null),
- // map.getOrElse("writeMode", null),
- // opts.getOrElse(("", Map[String, String]()))._2)
- // scalastyle:on
- }
-
-
- def workflow[$: P]: P[Workflow]
- = P(
- Start
- ~ whitespace.rep
- ~ "-- workflow" ~/ "=" ~/ singleLineValue ~ newlines
- ~ keyValPairs(2) ~/ newlines
- ~ options(2).? ~/ newlines
- ~ notifies(2).? ~/ newlines
- ~ steps
- ~ End
- ).map { case (name, kv, options, notifies, steps) =>
- val value = kv.toMap + ("name" -> name) + ("options" -> options.getOrElse(Map())) + ("notifies" -> notifies.getOrElse(Seq()))
- val json = objectMapper.writeValueAsString(value)
- val wf = objectMapper.readValue(json, classOf[Workflow])
- wf.steps = steps.toList
- wf
- }
-
- @Experimental(message = "experimental workflow parser", since = "1.0.0")
- def parseWorkflow(text: String): WFParseResult = {
- parse(text, workflow(_)) match {
- case Parsed.Success(value, _) => WFParseSuccess(value)
- case Parsed.Failure(label, failIndex, extra) => WFParseFail(Parsed.Failure(label, failIndex, extra))
- }
- }
-}
-
-private object ParserUtils {
-
- val objectMapper = new ObjectMapper().registerModule(DefaultScalaModule)
- objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
- objectMapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true)
- objectMapper.configure(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, false)
-
- // scalastyle:off
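- // Capture all input up to (but not consuming) the first position where `p` succeeds.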
- def Until(p: => P[_])(implicit ctx: P[_]): P[String] = {
- (!p ~ AnyChar).rep.! ~ &(p)
- }
- // scalastyle:on
-
- def trimSql(sql: String): String = {
- val trim = sql.trim
- if (trim.endsWith(";")) {
- trim.slice(0, trim.length - 1)
- } else {
- trim
- }
- }
-}
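
A minimal sketch of driving the parser (the workflow file path is hypothetical); on failure, WFParseFail.toString carries the row:col diagnostics built in WFParseResult.scala above.

    import com.github.sharpdata.sharpetl.core.syntax.{WFParseFail, WFParseSuccess, WorkflowParser}

    val text = scala.io.Source.fromFile("workflows/sales_daily.sql").mkString // hypothetical path
    WorkflowParser.parseWorkflow(text) match {
      case WFParseSuccess(wf) => println(s"parsed workflow '${wf.name}' with ${wf.steps.size} step(s)")
      case fail: WFParseFail  => sys.error(fail.toString)
    }
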
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowStep.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowStep.scala
deleted file mode 100644
index 68e6a19..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowStep.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-package com.github.sharpdata.sharpetl.core.syntax
-
-import com.github.sharpdata.sharpetl.core.util.Constants.{BooleanString, WriteMode}
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Evolving
-import com.github.sharpdata.sharpetl.core.datasource.config.DataSourceConfig
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.StringUtil
-
-import scala.beans.BeanProperty
-
-@Evolving(since = "1.0.0")
-final case class WorkflowStep(
- @BeanProperty
- var step: String = null, //scalastyle:ignore
-
- var source: DataSourceConfig = null, //scalastyle:ignore
-
- var target: DataSourceConfig = null, //scalastyle:ignore
-
- @BeanProperty
- var sql: String = null, //scalastyle:ignore
-
- @BeanProperty
- var sqlTemplate: String = null, //scalastyle:ignore
-
- // repartition creates new partitions and performs a full shuffle; default: none.
- @BeanProperty
- var repartition: String = null, //scalastyle:ignore
-
- // coalesce reuses existing partitions to minimize the amount of data shuffled; default: none.
- @BeanProperty
- var coalesce: String = null, //scalastyle:ignore
-
- /**
- * Whether to cache this step's result; a storage level must be specified. Supported levels:
- * NONE
- * DISK_ONLY
- * DISK_ONLY_2
- * MEMORY_ONLY
- * MEMORY_ONLY_2
- * MEMORY_ONLY_SER
- * MEMORY_ONLY_SER_2
- * MEMORY_AND_DISK
- * MEMORY_AND_DISK_2
- * MEMORY_AND_DISK_SER
- * MEMORY_AND_DISK_SER_2
- */
- @BeanProperty
- var persist: String = "MEMORY_AND_DISK",
-
- // Whether to save a local checkpoint of this step's result; no checkpoint by default.
- @BeanProperty
- var checkPoint: String = BooleanString.FALSE,
-
- /**
- * Write mode; for supported values see [[WriteMode]].
- */
- @BeanProperty
- var writeMode: String = null, //scalastyle:ignore
-
- // Whether to throw an exception when the fetched data is empty.
- @BeanProperty
- var throwExceptionIfEmpty: String = BooleanString.FALSE,
-
- // Whether to use the target table's schema, to simplify configuration when the source table does not explicitly define one.
- @BeanProperty
- var isUseTargetSchema: String = BooleanString.FALSE,
-
- // Whether to skip subsequent steps when the data or file is empty.
- @BeanProperty
- var skipFollowStepWhenEmpty: String = BooleanString.FALSE,
-
- @BeanProperty
- var loopOver: String = null, //scalastyle:ignore
-
- @BeanProperty
- var conf: Map[String, String] = Map[String, String]()
- ) extends Formatable {
-
- def getSourceConfig[T <: DataSourceConfig]: T = source.asInstanceOf[T]
-
- def setSourceConfig(sourceConfig: DataSourceConfig): Unit = {
- this.source = sourceConfig
- }
-
- def getTargetConfig[T <: DataSourceConfig]: T = target.asInstanceOf[T]
-
- def setTargetConfig(targetConfig: DataSourceConfig): Unit = {
- this.target = targetConfig
- }
-
-
- override def toString: String = {
- val builder = new StringBuilder()
- builder.append(s"-- step=$step$ENTER")
- builder.append(s"-- source=${source.dataSourceType}$ENTER")
- if (!isNullOrEmpty(source.toString.trim)) builder.append(s"${source.toString.trim}$ENTER")
- builder.append(s"-- target=${target.dataSourceType}$ENTER")
- if (!isNullOrEmpty(target.toString.trim)) builder.append(s"${target.toString.trim}$ENTER")
- if (!StringUtil.isNullOrEmpty(repartition)) builder.append(s"-- repartition=$repartition$ENTER")
- if (!StringUtil.isNullOrEmpty(coalesce)) builder.append(s"-- coalesce=$coalesce$ENTER")
- if (!StringUtil.isNullOrEmpty(persist) && persist != "MEMORY_AND_DISK") builder.append(s"-- persist=$persist$ENTER")
- if (!StringUtil.isNullOrEmpty(checkPoint) && checkPoint != "false") builder.append(s"-- checkPoint=$checkPoint$ENTER")
- if (!StringUtil.isNullOrEmpty(loopOver)) builder.append(s"-- loopOver=$loopOver$ENTER")
- buildOptionsString(builder)
- buildFileOptionString(builder)
- builder.toString()
- }
-
- // scalastyle:off
- def buildOptionsString(builder: StringBuilder): Unit = {
- if (!StringUtil.isNullOrEmpty(writeMode)) builder.append(s"-- writeMode=$writeMode$ENTER")
- if (!StringUtil.isNullOrEmpty(throwExceptionIfEmpty) && throwExceptionIfEmpty == BooleanString.TRUE) {
- builder.append(s"-- throwExceptionIfEmpty=$throwExceptionIfEmpty$ENTER")
- }
- if (!StringUtil.isNullOrEmpty(isUseTargetSchema) && isUseTargetSchema == BooleanString.TRUE) {
- builder.append(s"-- isUseTargetSchema=$isUseTargetSchema$ENTER")
- }
- if (conf.nonEmpty) {
- builder.append(s"-- conf$ENTER")
- conf.foreach { case (key, value) => builder.append(s"-- $key=$value$ENTER") }
- }
- if (!StringUtil.isNullOrEmpty(sql)) {
- builder.append(s"${sql.trim};$ENTER")
- } else {
- if (!StringUtil.isNullOrEmpty(sqlTemplate)) builder.append(s"${sqlTemplate.trim};$ENTER")
- }
- }
- // scalastyle:on
-
- def buildFileOptionString(builder: StringBuilder): Unit = {
- if (!StringUtil.isNullOrEmpty(skipFollowStepWhenEmpty) && skipFollowStepWhenEmpty == BooleanString.TRUE) {
- builder.append(s"-- skipFollowStepWhenEmpty=$skipFollowStepWhenEmpty$ENTER")
- }
-
- }
-}
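
Note that toString above serializes only non-default fields back into the `-- key=value` comment syntax that WorkflowParser consumes, so a parse/print round trip preserves a step's meaningful configuration.
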
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/test/FakeWorkflowInterpreter.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/test/FakeWorkflowInterpreter.scala
deleted file mode 100644
index fddbff9..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/test/FakeWorkflowInterpreter.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package com.github.sharpdata.sharpetl.core.test
-
-import com.github.sharpdata.sharpetl.core.api.{Variables, WorkflowInterpreter}
-import com.github.sharpdata.sharpetl.core.quality.{DataQualityCheckResult, QualityCheckRule}
-import com.github.sharpdata.sharpetl.core.repository.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-
-
-// $COVERAGE-OFF$
-class FakeWorkflowInterpreter extends WorkflowInterpreter[Seq[_]] {
- override def listFiles(step: WorkflowStep): List[String] = List()
-
- override def deleteSource(step: WorkflowStep): Unit = ()
-
- override def readFile(step: WorkflowStep, jobLog: JobLog,
- variables: Variables,
- files: List[String]): Seq[_] = List()
-
- override def executeWrite(jobLog: JobLog, df: Seq[_], step: WorkflowStep, variables: Variables): Unit = ()
-
- override def executeRead(step: WorkflowStep, jobLog: JobLog, variables: Variables): Seq[_] = Seq()
-
- override val qualityCheckAccessor: QualityCheckAccessor = new com.github.sharpdata.sharpetl.core.repository.mysql.QualityCheckAccessor()
-
- override val dataQualityCheckRules: Map[String, QualityCheckRule] = Map()
-
- override def createView(df: Seq[_], tempViewName: String): Unit = ???
-
- override def dropView(tempViewName: String): Unit = ???
-
- override def execute(sql: String): Seq[_] = ???
-
- override def queryCheckResult(sql: String): Seq[DataQualityCheckResult] = ???
-
- override def applicationId(): String = "fake-app-001"
-
- override def dropUnusedCols(df: Seq[_], cols: String): Seq[_] = ???
-
- override def union(left: Seq[_], right: Seq[_]): Seq[_] = left ++ right
-}
-// $COVERAGE-ON$
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/CodecUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/CodecUtil.scala
deleted file mode 100644
index 7c21070..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/CodecUtil.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.util.Constants.IO_COMPRESSION_CODEC_CLASS.{GZC_CODEC_CLASS, GZ_CODEC_CLASS}
-
-object CodecUtil {
- def matchCodec(extension: String): Option[String] = {
- extension match {
- case ".gz.c" => Some(GZC_CODEC_CLASS)
- case ".gz" => Some(GZ_CODEC_CLASS)
- case _ => None
- }
- }
-}
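
A minimal sketch: the codec is resolved purely from the file extension, and unknown extensions yield None so callers fall back to uncompressed I/O.

    CodecUtil.matchCodec(".gz")   // Some("org.apache.hadoop.io.compress.GzipCodec")
    CodecUtil.matchCodec(".gz.c") // Some("com.github.sharpdata.sharpetl.spark.extension.CGzipCodecExtension")
    CodecUtil.matchCodec(".txt")  // None
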
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Constants.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Constants.scala
deleted file mode 100644
index a770e72..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Constants.scala
+++ /dev/null
@@ -1,179 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import java.time.LocalDateTime
-import scala.util.matching.Regex
-
-object Constants {
- object Job {
- // We should not use null in Scala, but MyBatis cannot handle Option types,
- // so the null is centralized in this single place.
- val nullDataTime: LocalDateTime = null // scalastyle:ignore
- }
-
- object Environment {
- var CURRENT: String = _
- val LOCAL = "local"
- val DEV = "dev"
- val QA = "qa"
- val PROD = "prod"
- val TEST = "test"
- val EMBEDDED_HIVE = "embedded-hive"
- }
-
- object PathPrefix {
- val FILE = ""
- val HDFS = "hdfs"
- val DBFS = "dbfs"
- val OSS = "oss"
- }
-
- object Encoding {
- val UTF8 = "UTF-8"
- val GBK = "GBK"
- val ISO_8859_1 = "ISO-8859-1"
- }
-
- object Separator {
- val COMMA = ","
- val ENTER = "\n"
- }
-
- object BooleanString {
- val TRUE = true.toString
- val FALSE = false.toString
- }
-
- object ETLDatabaseType {
- val MYSQL = "mysql"
- val MSSQL = "mssql"
- val H2 = "h2"
- val SPARK_SHARP_ETL = "spark_sharp_etl"
- val FLINK_SHARP_ETL = "flink_sharp_etl"
- }
-
- object DataSourceType extends Serializable {
- val TRANSFORMATION = "transformation"
- val CSV: String = "csv"
- val CONSOLE: String = "console"
- val VARIABLES: String = "variables"
- val HIVE: String = "hive"
- val SPARK_SQL: String = "spark_sql"
- val ORACLE: String = "oracle"
- val MYSQL: String = "mysql"
- val MS_SQL_SERVER: String = "ms_sql_server"
- val POSTGRES: String = "postgres"
- val H2: String = "h2"
- val TEMP: String = "temp"
- val ES: String = "es"
- // Kudu tables created directly in Kudu use the table name as it appears in Kudu.
- val KUDU: String = "kudu"
- // Kudu tables created through Impala use the Impala table name (which differs from the actual Kudu table name).
- val IMPALA_KUDU: String = "impala_kudu"
- val IMPALA: String = "impala"
- val MOUNT: String = "mount"
- val FTP: String = "ftp"
- val HDFS: String = "hdfs"
- val DELTA_LAKE: String = "delta_lake"
- val SCP: String = "scp"
- val SFTP: String = "sftp"
- val JSON: String = "json"
- val EXCEL: String = "excel"
- val BATCH_KAFKA: String = "batch_kafka"
- val STREAMING_KAFKA: String = "streaming_kafka"
- // Whether the UDF comes from an object or a class must be configured when registering it.
- // In most cases object is sufficient; for a class-based UDF see com.github.sharpdata.sharpetl.spark.udf.PmmlUDF
- val OBJECT = "object"
- val CLASS = "class"
- // Loads a PMML model and registers it as a (special) UDF.
- val PMML = "pmml"
- val UDF = "udf"
- val DO_NOTHING = "do_nothing"
- val INFORMIX = "informix"
- val COMPRESSTAR = "compresstar"
- val BIGQUERY: String = "bigquery"
- val TEXT = "text"
- }
-
- object WriteMode {
- val OVER_WRITE: String = "overwrite"
- val APPEND: String = "append"
- val UPSERT: String = "upsert"
- val DELETE: String = "delete"
- val EXECUTE: String = "execute"
- val MERGE_WRITE: String = "mergewrite"
- }
-
- object IncrementalType {
- val DIFF: String = "diff"
- val AUTO_INC_ID: String = "auto_inc_id"
- val KAFKA_OFFSET: String = "kafka_offset"
- val UPSTREAM: String = "upstream"
- val TIMEWINDOW: String = "timewindow"
- }
-
- object IO_COMPRESSION_CODEC_CLASS {
- val GZ_CODEC_CLASS: String = "org.apache.hadoop.io.compress.GzipCodec"
- val GZC_CODEC_CLASS: String = "com.github.sharpdata.sharpetl.spark.extension.CGzipCodecExtension"
- val DEFAULT_CODEC_CLASS: String = "org.apache.hadoop.io.compress.DefaultCodec"
-
- val IO_COMPRESSION_CODEC_CLASS_NAMES: String = String.join(
- ",",
- GZ_CODEC_CLASS,
- GZC_CODEC_CLASS,
- DEFAULT_CODEC_CLASS
- )
- }
-
- /**
- * Regular expressions for parsing the SQL files under resources/task
- */
- object Pattern {
- val REPARTITION_NUM_PATTERN: Regex = """^[1-9][0-9]*$""".r
- val REPARTITION_COLUMNS_PATTERN: Regex = """^[a-zA-Z_][0-9a-zA-Z_]*(,[a-zA-Z_][0-9a-zA-Z_]*)*$""".r
- val REPARTITION_NUM_COLUMNS_PATTERN: Regex = """^[1-9][0-9]*(,[a-zA-Z_][0-9a-zA-Z_]*)+$""".r
- }
-
- object JdbcDataType {
- val VARCHAR = "varchar"
- val VARCHAR2 = "varchar2"
- val BPCHAR = "bpchar"
- val TEXT = "text"
- val JSONB = "jsonb"
- val CHAR = "char"
-
- val TIMESTAMP = "timestamp"
- val TIMESTAMPTZ = "timestamptz"
- val DATE = "date"
- val DATETIME = "datetime"
-
- val NUMERIC = "numeric"
- val NUMBER = "number"
-
- val INT4 = "int4"
- val INT8 = "int8"
- val BIGINT = "bigint"
- val INT = "int"
- val BIT = "bit"
- val TINYINT = "tinyint"
- val DOUBLE = "double"
- val ROWID = "rowid"
- val DECIMAL = "decimal"
- }
-
- object LoadType {
- val FULL = "full"
- val INCREMENTAL = "incremental"
- }
-
- object TransformerType {
- val OBJECT_TYPE = "object"
- val CLASS_TYPE = "class"
- val DYNAMIC_OBJECT_TYPE = "dynamic_object"
- }
-
- object PeriodType {
- val DAY = 1440
- val HOUR = 60
- }
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/DateUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/DateUtil.scala
deleted file mode 100644
index ef30212..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/DateUtil.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.util.StringUtil.BigIntConverter
-
-import java.math.BigInteger
-import java.text.SimpleDateFormat
-import java.time.LocalDateTime
-import java.time.format.DateTimeFormatter
-
-object DateUtil {
- val YYYY_MM_DD_HH_MM_SS = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
- val L_YYYY_MM_DD_HH_MM_SS = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
- val INT_YYYY_MM_DD_HH_MM_SS = DateTimeFormatter.ofPattern("yyyyMMddHHmmss")
- // datetime format in the job_log table (for `data_range_start` & `data_range_end`)
- val YYYYMMDDHHMMSS = DateTimeFormatter.ofPattern("yyyyMMddHHmmss")
- val SPARK_JSON_DATETIME = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSSSS")
-
- implicit class LocalDateTimeToBigInt(localDateTime: LocalDateTime) {
- def asBigInt(): BigInteger = localDateTime.format(YYYYMMDDHHMMSS).asBigInt
- }
-
- implicit class BigIntToLocalDateTime(value: BigInteger) {
- def asLocalDateTime(): LocalDateTime = LocalDateTime.parse(value.toString(), YYYYMMDDHHMMSS)
- }
-
- def formatDate(source: String, sourceFormat: SimpleDateFormat, targetFormat: SimpleDateFormat): String = {
- targetFormat.format(sourceFormat.parse(source))
- }
-}
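
A minimal sketch of the implicit round trip used for job_log data ranges:

    import com.github.sharpdata.sharpetl.core.util.DateUtil._
    import java.time.LocalDateTime

    val start = LocalDateTime.of(2024, 1, 2, 3, 4, 5)
    val asId  = start.asBigInt()       // 20240102030405
    val back  = asId.asLocalDateTime() // 2024-01-02T03:04:05
    assert(back == start)
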
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ETLConfig.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ETLConfig.scala
deleted file mode 100644
index d516b2a..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ETLConfig.scala
+++ /dev/null
@@ -1,199 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.exception.Exception.CanNotLoadPropertyFileException
-import com.github.sharpdata.sharpetl.core.util.Constants.{Encoding, Environment}
-import org.apache.commons.codec.binary.Base64
-import org.jasypt.encryption.StringEncryptor
-import org.jasypt.encryption.pbe.StandardPBEStringEncryptor
-import org.jasypt.properties.EncryptableProperties
-
-import java.io.Reader
-import java.nio.charset.CodingErrorAction
-import java.util.Properties
-import scala.collection.mutable
-import scala.io.{Codec, Source}
-import scala.jdk.CollectionConverters._
-
-object ETLConfig {
-
- private var prop: Option[Properties] = None
-
- var extraParam: mutable.Map[String, String] = mutable.Map()
-
- private var propertyPath: String = s"/application${StringUtil.environmentSuffix}.properties"
-
- private def rawProperties: Properties = {
- try {
- val p = new Properties()
- ETLLogger.info(s"Read raw properties $propertyPath")
- p.load(getPropertiesInputStream)
- replaceEnvVariable(p)
- p.asScala ++= (extraParam)
- p
- } catch {
- case e: Throwable =>
- throw CanNotLoadPropertyFileException(s"$propertyPath cannot be loaded", e)
- }
- }
-
- def encryptor: Option[StringEncryptor] = {
- val rawProp = rawProperties
- if (rawProp.containsKey("encrypt.algorithm")) {
- val encryptor = new StandardPBEStringEncryptor
- encryptor.setAlgorithm(rawProp.getOrDefault("encrypt.algorithm", "PBEWithMD5AndDES").toString)
- encryptor.setPassword(new String(Base64.decodeBase64(getEncryptKey), "UTF-8"))
- Some(encryptor)
- } else {
- None
- }
- }
-
- private def getEncryptKey: String = {
- var keyPath = rawProperties.getProperty("encrypt.keyPath", "")
- if ("".equals(keyPath)) {
- keyPath = this.getClass.getClassLoader.getResource("etl.key").getPath
- }
- val offset = rawProperties.getProperty("encrypt.offset", "10").toInt
- val reader = if (keyPath.contains("hdfs")) {
- HDFSUtil.getBytesDataReader(keyPath)
- } else {
- IOUtil.getBytesDataReader(keyPath)
- }
-
- val len = reader.readInt()
- val bytesBuffer = new Array[Byte](len)
- for (index <- 0 until len) bytesBuffer(index) = (reader.readInt() - offset).toByte
- reader.close()
-
- new String(bytesBuffer, "UTF-8").reverse
-
- }
-
- def plainProperties: Map[String, String] = {
- properties.asScala.map { case (key, _) =>
- (
- key,
- properties.getProperty(key) // NOTE: we must go through getProperty so EncryptableProperties returns the decrypted plain-text value
- )
- }.toMap
- }
-
- /**
- * Call [[ETLConfig.getProperty]] to get plain/decrypted value.
- * If you iterate via `properties.asScala.filter/toList/etc` you may get encrypted values.
- * If you want all plain-text values, or want to filter properties by a prefix, use [[ETLConfig.plainProperties]].
- */
- private def properties: Properties = {
- if (prop.isDefined) {
- prop.get
- } else {
- val p = encryptor.map(strEncryptor => new EncryptableProperties(strEncryptor)).orElse(Some(new Properties())).get
- ETLLogger.info(s"Read properties $propertyPath")
- try {
- p.load(getPropertiesInputStream)
- replaceEnvVariable(p)
- p.asScala ++= (extraParam)
- prop = Some(p)
- p
- } catch {
- case e: Throwable =>
- throw CanNotLoadPropertyFileException(s"$propertyPath cannot be loaded", e)
- }
- }
- }
-
- def reInitProperties(): Unit = {
- prop = None
- extraParam = mutable.Map()
- }
-
- private def getPropertiesInputStream: Reader = {
- implicit val codec: Codec = Codec(Encoding.UTF8)
- codec.onMalformedInput(CodingErrorAction.REPLACE)
- codec.onUnmappableCharacter(CodingErrorAction.REPLACE)
-
- val source =
- if (propertyPath.toLowerCase.startsWith("file")) {
- Source.fromURL(propertyPath)
- } else if (propertyPath.toLowerCase.startsWith("hdfs")) {
- Source.fromInputStream(HDFSUtil.readFile(propertyPath))
- } else if (propertyPath.toLowerCase.startsWith("oss")) {
- Source.fromInputStream(OssUtil.readFile(propertyPath))
- } else {
- Source.fromURL(getClass.getResource(propertyPath))
- }
- source.reader()
- }
-
- private def replaceEnvVariable(prop: Properties): Unit = {
- val env = System.getenv()
- prop.asScala.foreach(tuple => {
- if (tuple._2.startsWith("$")) {
- prop.put(tuple._1, env.get(tuple._2.substring(1)))
- }
- })
- }
-
- def getHttpProperties(connectionName: String): Map[String, String] = {
- val prefix = s"$connectionName.http."
- plainProperties
- .filter(_._1.startsWith(prefix))
- .map { case (key, value) => key.substring(prefix.length, key.length) -> value }
- }
-
- def getSparkProperties(wfName: String): Map[String, String] = {
- getProperties("spark", "default") ++ getProperties("spark", wfName)
- }
-
- def getFlinkProperties(wfName: String): Map[String, String] = {
- getProperties("flink", "default") ++ getProperties("flink", wfName)
- }
-
- def getKafkaProperties: String = {
- properties.asScala.getOrElse("kafka.restapi", "")
- }
-
- def getProperties(paramType: String, jobName: String): Map[String, String] = {
- val prefix = s"$paramType.$jobName."
- plainProperties
- .filter(_._1.startsWith(prefix))
- .map { case (key, value) => key.substring(prefix.length, key.length) -> value }
- }
-
- def setPropertyPath(path: String, env: String = ""): Unit = {
- Environment.CURRENT = env
- if (!isNullOrEmpty(path)) {
- propertyPath = path
- } else {
- propertyPath = s"/application${StringUtil.environmentSuffix}.properties"
- }
- prop = None
- }
-
- def getProperty(key: String): String = {
- val value = this.properties.getProperty(key)
- if (isNullOrEmpty(value)) {
- ETLLogger.error(s"[Config] property key $key was not found in properties file!")
- //throw new RuntimeException(s"[Config] property key $key was not found in properties file!")
- }
- value
- }
-
- def getProperties(pathPrefix: String): Map[String, String] = {
- val prefix = s"$pathPrefix."
- plainProperties
- .filter(_._1.startsWith(prefix))
- .map { case (key, value) => key.substring(prefix.length, key.length) -> value }
- }
-
- def getProperty(key: String, defaultValue: String): String = {
- this.properties.getProperty(key, defaultValue)
- }
-
- lazy val jobIdColumn: String = ETLConfig.getProperty("etl.default.jobId.column", "job_id")
- lazy val jobTimeColumn: String = ETLConfig.getProperty("etl.default.jobTime.column", "job_time")
- lazy val partitionColumn: String = ETLConfig.getProperty("etl.default.partition.column", "dt")
- lazy val incrementalDiffModeDataLimit: String = ETLConfig.getProperty("etl.default.incrementalDiff.limit", "5000000")
- lazy val purgeHiveTable: String = ETLConfig.getProperty("etl.default.purgeHiveTable", "none")
-}
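
A minimal sketch of the prefix-scoped lookup, assuming a hypothetical application.properties; workflow-specific keys win because the `spark.default.` scope is merged in first.

    // application.properties (hypothetical):
    //   spark.default.spark.sql.shuffle.partitions=64
    //   spark.sales_daily.spark.executor.memory=4g

    val conf = ETLConfig.getSparkProperties("sales_daily")
    // Map("spark.sql.shuffle.partitions" -> "64", "spark.executor.memory" -> "4g")
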
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ETLLogger.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ETLLogger.scala
deleted file mode 100644
index 6d7a6f1..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ETLLogger.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import org.apache.log4j.Logger
-
-object ETLLogger extends Serializable {
- val logger: Logger = Logger.getLogger("ETLLogger")
-
- @inline def debug(msg: String): Unit = {
- logger.debug(msg)
- }
-
- @inline def info(msg: String): Unit = {
- logger.info(msg)
- }
-
- @inline def error(msg: String): Unit = {
- logger.error(msg)
- }
-
- @inline def error(msg: String, t: Throwable): Unit = {
- logger.error(msg, t)
- }
-
- @inline def warn(msg: String): Unit = {
- logger.warn(msg)
- }
-
- @inline def fatal(msg: String): Unit = {
- logger.fatal(msg)
- }
-
- @inline def fatal(msg: String, t: Throwable): Unit = {
- logger.fatal(msg, t)
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ExcelUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ExcelUtil.scala
deleted file mode 100644
index 4639e08..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ExcelUtil.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.util.Constants.PathPrefix
-import com.github.sharpdata.sharpetl.core.exception.Exception.{CellNotFoundException, SheetNotFoundException}
-import com.github.sharpdata.sharpetl.core.util.IOUtil.getFullPath
-import org.apache.poi.ss.usermodel.{Cell, Row, Sheet}
-import org.apache.poi.xssf.usermodel.XSSFWorkbook
-
-import java.io.{File, FileInputStream}
-import scala.jdk.CollectionConverters._
-
-object ExcelUtil {
- def getBoolCell(idx: Int, line: Row): Boolean = {
- Option(line.getCell(idx)).exists(_.getBooleanCellValue)
- }
-
- // get boolean cell or false
- def getBoolCell(header: String, line: Row)(implicit headerMapping: Map[String, Int]): Boolean = {
- Option(getCellByName(line, header)).exists(_.getBooleanCellValue)
- }
-
- def getNumericCell(index: Int, line: Row): Double = {
- line.getCell(index).getNumericCellValue
- }
-
- def getNumericCell(header: String, line: Row)(implicit headerMapping: Map[String, Int]): Double = {
- getCellByName(line, header).getNumericCellValue
- }
-
- def getStringCell(header: String, line: Row)(implicit headerMapping: Map[String, Int]): String = {
- scala.util.Try {
- getCellByName(line, header).getStringCellValue
- }.toOption.orNull
- }
-
- def getStringCellOrNull(index: Int, line: Row): String = {
- Option(line.getCell(index)).map(_.getStringCellValue).filterNot(_.isEmpty).orNull
- }
-
- def getStringCellOrNull(header: String, line: Row)(implicit headerMapping: Map[String, Int]): String = {
- Option(getStringCell(header, line)).filterNot(_.isEmpty).orNull
- }
-
- def getStringCellOrDefault(header: String, line: Row, default: String)(implicit headerMapping: Map[String, Int]): String = {
- Option(getStringCell(header, line)).filterNot(_.isEmpty).getOrElse(default)
- }
-
- def getCellByName(row: Row, headerName: String)(implicit headerMapping: Map[String, Int]): Cell = {
- if (!headerMapping.isDefinedAt(headerName)) {
- throw CellNotFoundException(headerName)
- }
- row.getCell(headerMapping(headerName))
- }
-
- def readHeaders(headerRow: Row): Map[String, Int] = {
- headerRow
- .asScala
- .map(row => row.getStringCellValue -> row.getColumnIndex)
- .toMap
- }
-
- private def readSheet(sheet: Sheet): Seq[Row] = {
- sheet.iterator().asScala.filter(line => line.asScala.mkString("").nonEmpty).toList
- }
-
- def readSheetByName(workBook: XSSFWorkbook, sheetName: String): Seq[Row] = {
- val sheet = workBook.getSheet(sheetName)
- if (sheet == null) {
- throw SheetNotFoundException(s"Sheet name: $sheetName not found in workbook.")
- } else {
- readSheet(sheet)
- }
- }
-
- def readWorkBook(filePath: String): XSSFWorkbook = {
- val excelFile = if (filePath.startsWith(PathPrefix.HDFS) || filePath.startsWith(PathPrefix.DBFS)) {
- HDFSUtil.readFile(filePath)
- } else {
- val path = getFullPath(filePath)
- new FileInputStream(new File(path))
- }
- new XSSFWorkbook(excelFile)
- }
-
- def readSheet(filePath: String, sheetName: String): Seq[Row] = {
- readSheetByName(readWorkBook(filePath), sheetName)
- }
-}
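
A minimal sketch of header-based cell access, assuming a hypothetical workbook whose first row is the header:

    val rows = ExcelUtil.readSheet("users.xlsx", "users") // hypothetical file and sheet names
    implicit val headers: Map[String, Int] = ExcelUtil.readHeaders(rows.head)

    val names = rows.tail.map(row => ExcelUtil.getStringCellOrDefault("name", row, "unknown"))
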
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/FlywayUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/FlywayUtil.scala
deleted file mode 100644
index 80344d0..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/FlywayUtil.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import org.flywaydb.core.Flyway
-
-object FlywayUtil {
- def migrate(): Unit = {
-
- val flyway = if (ETLConfig.getProperty("flyway.url").toLowerCase().contains("jdbc:sqlserver:")) {
- // MS Sql Server
- Flyway
- .configure
- .schemas("sharp_etl")
- .defaultSchema("sharp_etl")
- .createSchemas(true)
- .locations("db/sqlserver/migration")
- .dataSource(
- ETLConfig.getProperty("flyway.url"),
- ETLConfig.getProperty("flyway.username"),
- ETLConfig.getProperty("flyway.password"))
- .load()
- } else if (ETLConfig.getProperty("flyway.url").toLowerCase().contains("jdbc:spark_sharp_etl:")) {
- Flyway
- .configure
- .locations("db/spark/migration")
- .defaultSchema("sharp_etl")
- .createSchemas(false)
- //.baselineVersion("0")
- //.baselineOnMigrate(true)
- .dataSource(
- ETLConfig.getProperty("flyway.url"),
- ETLConfig.getProperty("flyway.username"),
- ETLConfig.getProperty("flyway.password"))
- .load()
- } else if (ETLConfig.getProperty("flyway.url").toLowerCase().contains("jdbc:flink_sharp_etl:")) {
- Flyway
- .configure
- .locations("db/flink/migration")
-// .defaultSchema(ETLConfig.getProperty("flyway.catalog", "paimon") + "." + ETLConfig.getProperty("flyway.database", "sharp_etl"))
- .defaultSchema(ETLConfig.getProperty("flyway.database", "sharp_etl"))
- .createSchemas(false)
- //.baselineVersion("0")
- //.baselineOnMigrate(true)
- .dataSource(
- ETLConfig.getProperty("flyway.url"),
- "none",
- "none")
- .load()
- } else {
- // MySQL
- Flyway
- .configure
- .locations("db/mysql/migration")
- .dataSource(
- ETLConfig.getProperty("flyway.url"),
- ETLConfig.getProperty("flyway.username"),
- ETLConfig.getProperty("flyway.password"))
- .load()
- }
- flyway.migrate()
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/HDFSUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/HDFSUtil.scala
deleted file mode 100644
index b8b9890..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/HDFSUtil.scala
+++ /dev/null
@@ -1,338 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.datasource.config.RemoteFileDataSourceConfig
-import com.github.sharpdata.sharpetl.core.exception.Exception.NoFileFoundException
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.IO_COMPRESSION_CODEC_CLASS.IO_COMPRESSION_CODEC_CLASS_NAMES
-import com.github.sharpdata.sharpetl.core.util.Constants.{BooleanString, DataSourceType}
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs._
-import org.apache.hadoop.io.IOUtils
-import org.apache.hadoop.io.compress.{CompressionCodec, CompressionCodecFactory}
-
-import java.io._
-import java.nio.file.attribute.PosixFilePermissions
-import java.time.{LocalDateTime, ZoneId}
-import scala.collection.mutable.{ArrayBuffer, ListBuffer}
-
-// $COVERAGE-OFF$
-object HDFSUtil {
-
- var conf: Configuration = _
-
- {
- conf = new Configuration()
- conf.setBoolean("fs.hdfs.impl.disable.cache", true)
- conf.setBoolean("dfs.support.append", true)
- conf.set(CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY, IO_COMPRESSION_CODEC_CLASS_NAMES)
- }
-
- def getFileSystem(configuration: Configuration = conf): FileSystem = {
- try {
- FileSystem.get(configuration)
- } catch {
- case e: Exception =>
- ETLLogger.error("Init FileSystem failed.", e)
- throw e
- }
- }
-
- def closeFileSystem(fs: FileSystem): Unit = {
- if (fs != null) {
- try {
- fs.close()
- } catch {
- case e: Exception =>
- ETLLogger.error("Close FileSystem failed.", e)
- }
- }
- }
-
- def recursiveListFiles(path: String): ListBuffer[String] = {
- val fs = getFileSystem()
- val list = recursiveListFiles(fs, new Path(path))
- closeFileSystem(fs)
- list
- }
-
- def recursiveListFiles(fs: FileSystem, path: Path): ListBuffer[String] = {
- val paths = new ListBuffer[String]()
- if (fs.exists(path)) {
- if (fs.isFile(path)) {
- paths += path.toString
- } else {
- paths ++= fs
- .listStatus(path)
- .flatMap(fileStatus => recursiveListFiles(fs, fileStatus.getPath))
- }
- }
- paths
- }
-
- def listFileStatus(path: String): Seq[FileStatus] = {
- val fs = getFileSystem()
- val list = fs.listStatus(new Path(path))
- closeFileSystem(fs)
- list
- }
-
- def extractFileName(path: String): String = {
- val slash = path.lastIndexOf(Path.SEPARATOR)
- path.substring(slash + 1)
- }
-
- def readFile(filePath: String, fs: FileSystem = getFileSystem()): InputStream = {
- fs.open(new Path(filePath))
- }
-
- def getBytesDataReader(path: String): DataInputStream = {
- new DataInputStream(readFile(path))
- }
-
- def readLines(path: String): List[String] = {
- val fs = getFileSystem()
- val lines = readInputStreamInToLines(fs.open(new Path(path)), path)
- closeFileSystem(fs)
- lines
- }
-
- def readInputStreamInToLines(in: InputStream, path: String): List[String] = {
- val bufferedReader = try {
- new BufferedReader(new InputStreamReader(in))
- } catch {
- case e: IOException =>
- ETLLogger.error(s"Open InputStreamReader with path $path failed.", e)
- throw e
- }
-
- val lines = new ArrayBuffer[String]()
- // scalastyle:off
- var line: String = null
- // scalastyle:on
- while ( {
- line = bufferedReader.readLine();
- Option(line).isDefined
- }) {
- lines += line
- }
-
- try {
- bufferedReader.close()
- } catch {
- case e: IOException =>
- ETLLogger.error("Close BufferedReader failed.", e)
- }
- lines.toList
- }
-
- def listFileUrl(dir: String, fileNamePattern: String): List[String] = {
- val fs = getFileSystem()
- val list = listFileUrl(fs, dir, fileNamePattern)
- closeFileSystem(fs)
- list
- }
-
- def listFileUrl(fs: FileSystem, dir: String, fileNamePattern: String): List[String] = {
- val dirPath = new Path(dir)
- if (fs.exists(dirPath)) {
- fs
- .listStatus(
- dirPath,
- new PathFilter {
- override def accept(
- path: Path): Boolean = fileNamePattern.r.findFirstMatchIn(path.getName).isDefined
- }
- )
- .map(_.getPath.toString)
- .toList
- } else {
- List[String]()
- }
- }
-
- def exists(path: String): Boolean = {
- val fs = getFileSystem()
- fs.exists(new Path(path))
- }
-
- def delete(path: String, recursive: Boolean = true): Unit = {
- val fs = getFileSystem()
- delete(fs, path, recursive)
- closeFileSystem(fs)
- }
-
- def delete(fs: FileSystem, path: String, recursive: Boolean): Unit = {
- delete(fs, new Path(path), recursive)
- }
-
- def delete(fs: FileSystem, path: Path, recursive: Boolean): Unit = {
- if (fs.exists(path)) {
- fs.delete(path, recursive)
- ETLLogger.info(s"Delete file '$path' success.")
- } else {
- ETLLogger.info(s"File '$path' not exists.")
- }
- }
-
- def mkdirs(dir: String): Unit = {
- val fs = getFileSystem()
- mkdirs(fs, dir)
- closeFileSystem(fs)
- }
-
- def mkdirs(fs: FileSystem, dir: String): Unit = {
- fs.mkdirs(new Path(dir))
- }
-
- def mv(src: String, target: String, overWrite: Boolean): Unit = {
- val fs = getFileSystem()
- mv(fs, src, target, overWrite)
- closeFileSystem(fs)
- }
-
- def mv(fs: FileSystem, src: String, target: String, overWrite: Boolean): Unit = {
- mv(fs, new Path(src), new Path(target), overWrite)
- }
-
- def mv(fs: FileSystem, src: Path, target: Path, overWrite: Boolean): Unit = {
- if (overWrite) {
- delete(fs, target, recursive = true)
- }
- val mvResult = try {
- fs.rename(src, target)
- } catch {
- case e: FileAlreadyExistsException =>
- ETLLogger.error(s"Rename file '$src' to '$target' failed, target path '$target' has already exists.")
- throw e
- case e: Exception =>
- ETLLogger.error(e.getMessage)
- throw e
- }
- if (mvResult) {
- ETLLogger.info(s"Rename file '$src' to '$target' success.")
- } else {
- throw new RuntimeException(s"Rename file '$src' to '$target' failed.")
- }
- }
-
-
- def put(src: String, dst: String): Unit = {
- put(src, dst, "", decompress = false)
- }
-
- def put(
- src: String,
- dst: String,
- extension: String,
- decompress: Boolean): Unit = {
- val fs = getFileSystem()
- put(fs, src, dst, extension, decompress)
- closeFileSystem(fs)
- }
-
- def put(
- fs: FileSystem,
- src: String,
- dst: String,
- extension: String,
- decompress: Boolean): Unit = {
- val fileInputStream = new FileInputStream(src)
- val codec = getCodecByExtension(extension)
- val in = if (decompress && codec.isDefined) {
- codec.get.createInputStream(new FileInputStream(src))
- } else {
- fileInputStream
- }
- ETLLogger.info(s"put local file '$src' to HDFS '$dst'")
- val out = fs.create(new Path(dst))
- IOUtils.copyBytes(in, out, conf, true)
- }
-
- def getCodecByExtension(codecExtension: String): Option[CompressionCodec] = {
- val factory = new CompressionCodecFactory(conf)
- val codecClassName = CodecUtil.matchCodec(codecExtension)
- if (codecClassName.isDefined) {
- Some(factory.getCodecByClassName(codecClassName.get))
- } else {
- None
- }
- }
-
- def moveFromLocal(src: String, des: String): Unit = {
- val fs = getFileSystem()
- moveFromLocal(fs, src, des)
- }
-
- def append(dst: String, content: String): Unit = {
- val fs = getFileSystem()
- val path = new Path(dst)
- val out: FSDataOutputStream = fs.append(path)
- out.writeBytes(content)
- out.hflush()
- out.close()
- }
-
- def moveFromLocal(fs: FileSystem, src: String, des: String): Unit = {
- ETLLogger.info(s"Uploading from local path $src to HDFS path $des...")
- fs.moveFromLocalFile(new Path(src), new Path(des))
- }
-
- def downloadFileToHDFS(step: WorkflowStep,
- jobLog: JobLog,
- variables: Variables): List[String] = {
- downloadRemoteFilesToLocal(step, jobLog, variables)
-
- val dataSourceConfig = step.getSourceConfig[RemoteFileDataSourceConfig]
- val hdfsPaths = ListBuffer[String]()
- if (!isNullOrEmpty(jobLog.file)) {
- jobLog.file.split(",")
- .foreach(fileName => {
- val hdfsFilePath = StringUtil.concatFilePath(dataSourceConfig.hdfsDir, fileName)
- HDFSUtil.moveFromLocal(
- StringUtil.concatFilePath(dataSourceConfig.tempDestinationDir, fileName),
- hdfsFilePath)
- hdfsPaths += hdfsFilePath
- }
- )
- ETLLogger.info(s"Uploaded file to HDFS ${hdfsPaths.mkString(",")}")
- }
- hdfsPaths.toList
- }
-
- def downloadRemoteFilesToLocal(step: WorkflowStep,
- jobLog: JobLog,
- variables: Variables): Unit = {
- val dataSourceConfig = step.getSourceConfig[RemoteFileDataSourceConfig]
- val startTime = LocalDateTime.parse(variables("${DATA_RANGE_START}"), L_YYYY_MM_DD_HH_MM_SS).atZone(ZoneId.of(dataSourceConfig.timeZone)).toEpochSecond
- val endTime = LocalDateTime.parse(variables("${DATA_RANGE_END}"), L_YYYY_MM_DD_HH_MM_SS).atZone(ZoneId.of(dataSourceConfig.timeZone)).toEpochSecond
- val permission = step.getSourceConfig[RemoteFileDataSourceConfig].tempDestinationDirPermission
- val permissions = PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString(permission)).value()
-
- if (!new File(dataSourceConfig.tempDestinationDir).exists()) {
- new File(dataSourceConfig.tempDestinationDir).mkdirs()
- }
- val fileNames = dataSourceConfig.getDataSourceType match {
- case DataSourceType.SFTP =>
- SFTPUtil.downloadFiles(step, dataSourceConfig.configPrefix, dataSourceConfig.sourceDir,
- dataSourceConfig.tempDestinationDir, startTime, endTime, permissions)
- case DataSourceType.MOUNT =>
- MountUtil.moveFiles(step, dataSourceConfig.sourceDir, dataSourceConfig.tempDestinationDir,
- dataSourceConfig.timeZone, startTime, endTime, permissions)
- case _ => ???
- }
- if (fileNames == null || fileNames.isEmpty) {
- if (dataSourceConfig.breakFollowStepWhenEmpty == BooleanString.TRUE) {
- throw NoFileFoundException(step.step)
- }
- ETLLogger.warn("No files need download, and current config `breakFollowStepWhenEmpty` is false, so the job will continue the next steps.")
- } else {
- jobLog.file = fileNames.mkString(",")
- ETLLogger.info("Downloaded file to local")
- }
- }
-}
-// $COVERAGE-ON$
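
A minimal sketch of the upload helper (paths are hypothetical): the codec is resolved from the extension via CodecUtil, so a .gz source can be decompressed while copying to HDFS.

    HDFSUtil.put(
      src = "/data/out/events.csv.gz",    // local gzip file
      dst = "hdfs:///staging/events.csv", // HDFS target
      extension = ".gz",
      decompress = true)
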
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/IOUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/IOUtil.scala
deleted file mode 100644
index 00090b8..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/IOUtil.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.util.Constants.Encoding
-
-import java.io.{DataInputStream, File, FileInputStream, FileWriter, InputStream, PrintWriter}
-import java.nio.charset.CodingErrorAction
-import java.util.jar.JarFile
-import scala.collection.mutable
-import scala.collection.mutable.ArrayBuffer
-import scala.io.{BufferedSource, Codec, Source}
-
-object IOUtil {
-
- def delete(path: String): Unit = {
- val file = new File(path)
- if (file.exists()) {
- file.delete()
- }
- }
-
- def mkdirs(dir: String): Unit = {
- val file = new File(dir)
- if (!file.exists()) {
- file.mkdirs()
- }
- }
-
- def write(path: String, line: String, append: Boolean = false): Unit = {
- val file = new File(path)
- val writer = new PrintWriter(new FileWriter(file, append))
- writer.println(line)
- writer.close()
- }
-
- def readFile(path: String): InputStream = {
- try {
- new FileInputStream(path)
- } catch {
- case e: Throwable => throw new RuntimeException(s"unable to read file from $path", e)
- }
- }
-
- def getBytesDataReader(path: String): DataInputStream = {
- new DataInputStream(readFile(path))
- }
-
- def readLinesFromText(path: String, charset: String = Encoding.UTF8): List[String] = {
- try {
- val source = Source.fromFile(path, charset)
- val input = source.getLines().toList
- source.close()
- input
- } catch {
- case _: java.io.FileNotFoundException => readProcessConfigFromJar(path)
- }
- }
-
- def recursiveListFiles(path: String): ArrayBuffer[String] = {
- val list = ArrayBuffer[String]()
- val file = new File(path)
- if (file != null && file.exists()) {
- if (file.isDirectory) {
- list ++= file.listFiles().flatMap(f => recursiveListFiles(f.getPath))
- } else {
- list += file.getPath
- }
- }
- list
- }
-
- def listFiles(pathName: String): List[String] = {
- val list = ArrayBuffer[String]()
- val absolutePaths = this.getClass.getClassLoader.getResources(pathName)
- while (absolutePaths.hasMoreElements) {
- val resourcePath = absolutePaths.nextElement().getPath
- list ++= recursiveListFiles(resourcePath).toList
- }
- list.toList
- }
-
- def listFilesJar(configRootDir: String): List[String] = {
- val path = this.getClass.getProtectionDomain.getCodeSource.getLocation.getPath
- val jarFile = new File(path)
- if (jarFile.isDirectory) {
- // scalastyle:off
- return List()
- // scalastyle:on
- }
- val localJarFile = new JarFile(jarFile)
- val entries = localJarFile.entries()
- val result = mutable.ListBuffer[String]()
- while (entries.hasMoreElements) {
- val jarEntry = entries.nextElement()
- val innerPath = jarEntry.getName
- if (innerPath.startsWith(configRootDir)) {
- result.append(innerPath)
- }
- }
- result.toList
- }
-
- def recursiveListFilesFromResource(pathName: String): List[String] = {
- (listFiles(pathName) ++ listFilesJar(pathName))
- .filter(it => it.contains(".sql") || it.contains(".scala"))
- }
-
- def readLinesFromInputStream(inputStream: InputStream): List[String] = {
- assert(inputStream != null)
- var source: Option[BufferedSource] = None
- try {
- implicit val codec: Codec = Codec(Encoding.UTF8)
- codec.onMalformedInput(CodingErrorAction.REPLACE)
- codec.onUnmappableCharacter(CodingErrorAction.REPLACE)
- source = Option(Source.fromInputStream(inputStream))
- val input = source.get.getLines().toList
- input
- } catch {
- case e: Exception =>
- ETLLogger.error(s"read file failed.")
- throw e
- } finally {
- if (source.isDefined) {
- try {
- source.get.close()
- } catch {
- case e: Exception =>
- ETLLogger.error(s"close BufferedSource failed.", e)
- }
- }
- }
- }
-
- def readLinesFromResource(path: String): List[String] = {
- val inputStream = this.getClass.getClassLoader.getResourceAsStream(path)
- readLinesFromInputStream(inputStream)
- }
-
- def getInputStreamFromJar(filePath: String): InputStream = {
- val inputStream = this.getClass.getClassLoader.getResourceAsStream(
- filePath
- )
- if (inputStream == null) {
- throw new RuntimeException(s"Not found file '$filePath' in jar.")
- }
- inputStream
- }
-
- def readProcessConfigFromJar(fileName: String): List[String] = {
- val inputStream = getInputStreamFromJar(fileName)
- readLinesFromInputStream(inputStream)
- }
-
- def getFullPath(path: String): String =
- if (path.startsWith("~")) {
- //user home dir
- val home: String = System.getProperty("user.home")
- home + path.replaceFirst("~", "")
- } else {
- path
- }
-
-}
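
A minimal sketch of the "~" expansion in getFullPath:

    IOUtil.getFullPath("~/data/in.csv") // e.g. "/home/etl/data/in.csv", depending on user.home
    IOUtil.getFullPath("/abs/in.csv")   // returned unchanged
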
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/IncIdUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/IncIdUtil.scala
deleted file mode 100644
index c0523c1..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/IncIdUtil.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-object IncIdUtil {
- implicit class NumberStringPadding(value: String) {
- // scalastyle:off
- def padding(size: Int = 12): String = {
- value.reverse.padTo(size, '0').reverse
- }
-
- // taken from https://stackoverflow.com/a/2800839/5597803
- def trimPadding(): String = {
- value.replaceFirst("^0+(?!$)", "")
- }
- // scalastyle:on
- }
-}
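
A minimal sketch of the padding helpers used for auto-increment IDs:

    import com.github.sharpdata.sharpetl.core.util.IncIdUtil._

    "42".padding()               // "000000000042" (left-padded to 12 chars)
    "000000000042".trimPadding() // "42"
    "0".trimPadding()            // "0", since the (?!$) lookahead preserves a lone zero
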
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/JDBCUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/JDBCUtil.scala
deleted file mode 100644
index 07b244b..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/JDBCUtil.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-object JDBCUtil {
- lazy val dbType: String = {
- val jdbcUrl = ETLConfig.getProperty("flyway.url").toLowerCase()
- if (jdbcUrl.contains(":sqlserver:")) {
- Constants.ETLDatabaseType.MSSQL
- } else if (jdbcUrl.contains("jdbc:h2")) {
- Constants.ETLDatabaseType.H2
- } else if (jdbcUrl.contains("spark_sharp_etl")) {
- Constants.ETLDatabaseType.SPARK_SHARP_ETL
- } else if (jdbcUrl.contains("flink_sharp_etl")) {
- Constants.ETLDatabaseType.FLINK_SHARP_ETL
- } else {
- Constants.ETLDatabaseType.MYSQL
- }
- }
-}
-
-object JdbcDefaultOptions {
- val PARTITION_NUM = 8
- val BATCH_SIZE = 1024
-}
-
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/JobLogUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/JobLogUtil.scala
deleted file mode 100644
index 10a9206..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/JobLogUtil.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.repository.model.{JobLog, JobStatus}
-import com.github.sharpdata.sharpetl.core.util.Constants.PeriodType.{DAY, HOUR}
-import com.github.sharpdata.sharpetl.core.util.Constants.{DataSourceType, IncrementalType}
-import com.github.sharpdata.sharpetl.core.util.DateUtil.{L_YYYY_MM_DD_HH_MM_SS, YYYYMMDDHHMMSS}
-
-import java.time.LocalDateTime
-
-object JobLogUtil {
-
- implicit class JogLogExternal(jobLog: JobLog) {
-
- def dataFlow(): String = {
- if (jobLog.getStepLogs().isEmpty) {
- ""
- } else {
- val stepLogs = jobLog.getStepLogs()
- val filterStepLogs = stepLogs.filter(stepLog =>
- stepLog.targetType.nonEmpty
- && stepLog.targetType != DataSourceType.DO_NOTHING
- && stepLog.targetType != DataSourceType.VARIABLES
- && stepLog.targetType != DataSourceType.TEMP)
-
- if (filterStepLogs.length == 1) {
- val head = filterStepLogs.head
- s"${head.sourceType}(${head.sourceCount}) -> ${head.targetType}(${head.successCount})"
- } else {
- filterStepLogs
- .map(stepLog => s"${stepLog.targetType}(${stepLog.successCount})")
- .mkString(" -> ")
- }
- }
- }
-
- def failStep(): String = {
- if (jobLog.status != JobStatus.FAILURE || jobLog.getStepLogs().isEmpty) {
- ""
- } else {
- jobLog.getStepLogs().last.stepId
- }
- }
-
- def errorMessage(): String = {
- if (jobLog.status != JobStatus.FAILURE || jobLog.getStepLogs().isEmpty) {
- ""
- } else {
- jobLog.getStepLogs().last.error.replace("\n", " ")
- }
- }
-
- def duration(): Int = {
- if (jobLog.getStepLogs().isEmpty) {
- 0
- } else {
- jobLog.getStepLogs().map(_.duration).sum
- }
- }
- }
-
- implicit class JobLogFormatter(jobLog: JobLog) {
- def formatDataRangeStart(): String =
- jobLog.logDrivenType match {
- case IncrementalType.AUTO_INC_ID => jobLog.dataRangeStart
- case IncrementalType.KAFKA_OFFSET => jobLog.dataRangeStart
- case IncrementalType.UPSTREAM => jobLog.dataRangeStart
- case _ => LocalDateTime.parse(jobLog.dataRangeStart, YYYYMMDDHHMMSS).format(L_YYYY_MM_DD_HH_MM_SS)
- }
-
- def formatDataRangeEnd(): String =
- jobLog.logDrivenType match {
- case IncrementalType.AUTO_INC_ID => jobLog.dataRangeEnd
- case IncrementalType.KAFKA_OFFSET => jobLog.dataRangeEnd
- case IncrementalType.UPSTREAM => jobLog.dataRangeEnd
- case _ => LocalDateTime.parse(jobLog.dataRangeEnd, YYYYMMDDHHMMSS).format(L_YYYY_MM_DD_HH_MM_SS)
- }
-
- def jobTimeBase(jobLog: JobLog): Boolean = {
- !jobLog.logDrivenType.equals(IncrementalType.AUTO_INC_ID) &&
- !jobLog.logDrivenType.equals(IncrementalType.KAFKA_OFFSET) &&
- !jobLog.logDrivenType.equals(IncrementalType.UPSTREAM)
- }
-
- def defaultTimePartition(): scala.collection.mutable.Map[String, String] = {
- val timePartitionArg = scala.collection.mutable.Map[String, String]()
-
- if (jobLog.logDrivenType == null || jobTimeBase(jobLog)) {
- val startDate = LocalDateTime.parse(jobLog.dataRangeStart, YYYYMMDDHHMMSS)
- timePartitionArg.put("${YEAR}", startDate.getYear.toString)
- timePartitionArg.put("${MONTH}", f"${startDate.getMonthValue}%02d")
- if (jobLog.period % DAY == 0) {
- timePartitionArg.put("${DAY}", f"${startDate.getDayOfMonth}%02d")
- } else if (jobLog.period % HOUR == 0) {
- timePartitionArg.put("${HOUR}", f"${startDate.getHour}%02d")
- } else {
- timePartitionArg.put("${MINUTE}", f"${startDate.getMinute}%02d")
- }
- }
- timePartitionArg
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Memo.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Memo.scala
deleted file mode 100644
index 6cbb3e0..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Memo.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import java.util.concurrent.ConcurrentHashMap
-import scala.jdk.CollectionConverters._
-
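-// Memoizing function wrappers: results are cached per argument (pair) in a
-// ConcurrentHashMap, so repeated calls reuse the previously computed value.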
-final case class Memo1[A, B](f: A => B) extends (A => B) {
- private[this] val cache = new ConcurrentHashMap[A, B].asScala
-
-  def apply(a: A): B = cache.getOrElseUpdate(a, {
-    f(a)
-  })
-}
-
-final case class Memo2[A, B, C](f: (A, B) => C) extends ((A, B) => C) {
- private[this] val cache = new ConcurrentHashMap[(A, B), C]().asScala
-
-  def apply(a: A, b: B): C = cache.getOrElseUpdate((a, b), {
-    f(a, b)
-  })
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/MountUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/MountUtil.scala
deleted file mode 100644
index de3cf6c..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/MountUtil.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.datasource.config.RemoteFileDataSourceConfig
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.BooleanString
-import org.apache.commons.io.FileUtils
-
-import java.io.File
-import java.nio.charset.StandardCharsets
-import java.nio.file.attribute.PosixFilePermission
-import java.nio.file.{Files, StandardCopyOption}
-import java.time.ZoneId
-import java.util
-import java.util.regex.Pattern
-
-object MountUtil {
-
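-  // Moves files from a mounted sourceDir into destinationDir; when time filtering
-  // is enabled, only files modified within [startTime, endTime) are picked up.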
- def moveFiles(step: WorkflowStep,
- sourceDir: String,
- destinationDir: String,
- timeZone: String,
- startTime: Long,
- endTime: Long,
- permissions: util.Set[PosixFilePermission]): List[String] = {
- val remoteFileDataSourceConfig = step.getSourceConfig[RemoteFileDataSourceConfig]
- new File(sourceDir)
- .listFiles()
- .filter { file =>
- val modifiedTime = Files.getLastModifiedTime(file.toPath).toInstant.atZone(ZoneId.of(timeZone)).toEpochSecond
- file.isFile && remoteFileDataSourceConfig.filterByTime == BooleanString.TRUE && modifiedTime >= startTime && modifiedTime < endTime
- }
- .map { file =>
- val fileName = file.getName
-        ETLLogger.info(s"Moving file: $fileName to $destinationDir")
- val targetFile = new File(StringUtil.concatFilePath(destinationDir, fileName))
- if (remoteFileDataSourceConfig.dos2unix.equals(BooleanString.TRUE)) {
-          dos2unix(file, targetFile, permissions)
- } else {
- moveFileToLocal(file, targetFile, permissions)
- }
-
- fileName
- }
- .toList
- }
-
-  def dos2unix(sourceFile: File, targetFile: File, permissions: util.Set[PosixFilePermission]): Unit = {
- var contents = FileUtils.readFileToString(sourceFile, StandardCharsets.UTF_8)
- contents = Pattern.compile("\r").matcher(contents).replaceAll("")
- FileUtils.write(targetFile, contents, StandardCharsets.UTF_8)
- Files.setPosixFilePermissions(targetFile.toPath, permissions)
- }
-
- def moveFileToLocal(sourceFile: File, targetFile: File, permissions: util.Set[PosixFilePermission]): Unit = {
- Files.copy(sourceFile.toPath, targetFile.toPath, StandardCopyOption.REPLACE_EXISTING)
- Files.setPosixFilePermissions(targetFile.toPath, permissions)
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/OssUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/OssUtil.scala
deleted file mode 100644
index 644143b..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/OssUtil.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-
-import com.aliyun.oss.OSSClientBuilder
-import com.github.sharpdata.sharpetl.core.util.HDFSUtil.readInputStreamInToLines
-
-import java.io.InputStream
-import scala.jdk.CollectionConverters._
-
-object OssUtil {
-
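-  // Credentials and endpoint come from the PROP_AK / PROP_SK / PROP_ENDPOINT
-  // environment variables; paths are of the form oss://bucket/key.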
- lazy val regex = "oss://([^/]*)/(.*)".r
- lazy val accessKeyId = System.getenv("PROP_AK")
- lazy val accessKeySecret = System.getenv("PROP_SK")
- lazy val endpoint = System.getenv("PROP_ENDPOINT")
- lazy val ossClient = new OSSClientBuilder()
- .build(endpoint, accessKeyId, accessKeySecret)
-
- def recursiveListFiles(configRootDir: String): List[String] = {
- val regex(bucketName, prefix) = configRootDir
- ossClient.listObjectsV2(bucketName, prefix)
- .getObjectSummaries
- .asScala
- .map(it => s"oss://$bucketName/${it.getKey}")
- .filter(it => it.endsWith("sql") || it.endsWith("scala"))
- .toList
- }
-
- def readLines(taskPath: String): List[String] = {
- val regex(bucketName, key) = taskPath
-
- val in = ossClient
- .getObject(bucketName, key)
- .getObjectContent
- readInputStreamInToLines(in, taskPath)
- }
-
- def readFile(propertyPath: String): InputStream = {
- val regex(bucketName, key) = propertyPath
-
- ossClient
- .getObject(bucketName, key)
- .getObjectContent
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ReflectUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ReflectUtil.scala
deleted file mode 100644
index aeab1e8..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ReflectUtil.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import scala.reflect.runtime.universe.TermName
-import scala.reflect.runtime.{universe => ru}
-
-
-object ReflectUtil {
- val classMirror: ru.Mirror = ru.runtimeMirror(getClass.getClassLoader)
-
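-  // Invokes `methodName` on the Scala object named `className` via runtime
-  // reflection, picking the first method whose parameter count matches `args`.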
- def reflectObjectMethod(className: String, methodName: String, args: Any*): Any = {
- val moduleSymbol = classMirror.staticModule(className)
- val moduleMirror = classMirror.reflectModule(moduleSymbol)
- val instanceMirror = classMirror.reflect(moduleMirror.instance)
- val methodSymbol = moduleSymbol
- .typeSignature
- .members
- .filter(member => {
- member.isMethod &&
- member.name == TermName(methodName) &&
- member.asMethod.paramLists.nonEmpty &&
- member.asMethod.paramLists.head.size == args.size
- })
- .head
- .asMethod
- instanceMirror.reflectMethod(methodSymbol)(args: _*)
- }
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/SFTPClient.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/SFTPClient.scala
deleted file mode 100644
index a085218..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/SFTPClient.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.datasource.connection.SftpConnection
-import com.jcraft.jsch.{ChannelSftp, JSch, ProxyHTTP, Session}
-
-import java.io.File
-import java.nio.file.Files
-import java.nio.file.attribute.PosixFilePermission
-import java.util
-import java.util.Properties
-
-class SFTPClient(configuration: SftpConnection) {
-
- var channelSftp: ChannelSftp = _
- var session: Session = _
- val SFTP: String = "sftp"
-
- def listFiles(path: String, filter: ChannelSftp#LsEntry => Boolean): List[String] = {
-    initSFTPChannel()
-
- var fileNames: List[ChannelSftp#LsEntry] = List()
- channelSftp.ls(path, new ChannelSftp.LsEntrySelector() {
- override def select(entry: ChannelSftp#LsEntry): Int = {
- if (filter.apply(entry)) {
- fileNames = fileNames :+ entry
- }
- 0
- }
- })
- val fileNameToMtime = fileNames.map(entry => (entry.getFilename, entry.getAttrs.getMTime))
- fileNameToMtime.map(tuple => tuple._1)
- }
-
- def downloadFileToLocal(fileName: String, sourceDir: String, destination: String, permissions: util.Set[PosixFilePermission]): Unit = {
- if (!new File(destination).exists()) {
- new File(destination).mkdirs()
- }
-    initSFTPChannel()
- val sftpPath = StringUtil.concatFilePath(sourceDir, fileName)
- ETLLogger.info(s"Downloading from SFTP path $sftpPath to local path $destination/$fileName...")
- channelSftp.get(sftpPath, destination)
- Files.setPosixFilePermissions(new File(s"$destination/$fileName").toPath, permissions)
- }
-
- def close(): Unit = {
- if (channelSftp != null && channelSftp.isConnected) {
- channelSftp.disconnect()
- }
-
- if (session != null && session.isConnected) {
- session.disconnect()
- }
- }
-
-  private def initSFTPChannel(): Unit = {
- if (channelSftp == null || !channelSftp.isConnected) {
- initSession()
- if (!session.isConnected) {
- session.connect()
- }
- val channel = session.openChannel(SFTP)
- channelSftp = channel.asInstanceOf[ChannelSftp]
- channelSftp.connect()
- }
- }
-
- private def initSession(): Unit = {
- if (session == null) {
- val jSch = new JSch()
- session = jSch.getSession(configuration.username, configuration.host, configuration.port)
- session.setPassword(configuration.password)
- val properties = new Properties()
- properties.setProperty("StrictHostKeyChecking", "no")
- session.setConfig(properties)
- if (configuration.proxyHost != null) {
- session.setProxy(new ProxyHTTP(configuration.proxyHost, configuration.proxyPort.toInt))
- }
- }
- }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/SFTPUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/SFTPUtil.scala
deleted file mode 100644
index 7a0b983..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/SFTPUtil.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.datasource.config.RemoteFileDataSourceConfig
-import com.github.sharpdata.sharpetl.core.datasource.connection.SftpConnection
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.BooleanString
-import com.jcraft.jsch.ChannelSftp
-
-import java.nio.file.attribute.PosixFilePermission
-import java.util
-
-object SFTPUtil {
-
- def downloadFiles(step: WorkflowStep,
- prefix: String,
- sourceDir: String,
- destinationDir: String,
- startTime: Long,
- endTime: Long,
- permissions: util.Set[PosixFilePermission]): List[String] = {
- val configuration = new SftpConnection(prefix)
- val sFTPClient = new SFTPClient(configuration)
- val remoteFileDataSourceConfig = step.getSourceConfig[RemoteFileDataSourceConfig]
-
- val filter = (entry: ChannelSftp#LsEntry) => {
- if (remoteFileDataSourceConfig.filterByTime == BooleanString.TRUE) {
- entry.getFilename.matches(remoteFileDataSourceConfig.fileNamePattern) &&
- entry.getAttrs.getMTime >= startTime &&
- entry.getAttrs.getMTime < endTime
- } else {
- entry.getFilename.matches(remoteFileDataSourceConfig.fileNamePattern)
- }
- }
- val fileNames = sFTPClient.listFiles(sourceDir, filter)
- ETLLogger.info(s"fileNames is ${fileNames.mkString(",")}")
- fileNames.foreach(fileName => sFTPClient.downloadFileToLocal(fileName, sourceDir, destinationDir, permissions))
- sFTPClient.close()
- fileNames
- }
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ScalaScriptCompiler.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ScalaScriptCompiler.scala
deleted file mode 100644
index 0403bea..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/ScalaScriptCompiler.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import scala.reflect.runtime
-import scala.tools.reflect.{FrontEnd, ToolBox}
-
-object ScalaScriptCompiler {
-
- final case class DynamicCompileFailedException(message: String) extends RuntimeException(message)
-
- private val toolbox = ToolBox(runtime.currentMirror).mkToolBox(
- frontEnd = new FrontEnd {
- override def display(info: Info): Unit = {
- info.severity.toString() match {
- case "ERROR" => ETLLogger.error(
- s"""
- |Compile ${info.severity}: ${info.msg}
- |near:
- |${info.pos.source.lineToString(info.pos.line - 1)}
- |${info.pos.source.lineToString(info.pos.line)}
- |
- |Helping message:
- |
-          |NOTE: You cannot use `foreachRDD`, `foreachPartition`, or any other operator that involves multiple nodes.
-          |
-          |If you encounter the error "illegal cyclic reference involving object InterfaceAudience",
-          |that is because you are using an API related to `org.apache.hadoop.fs.Path`,
-          |which triggers a Scala bug: https://github.com/scala/bug/issues/12190;
-          |you will need the spark-submit options `--conf "spark.executor.userClassPathFirst=true" --conf "spark.driver.userClassPathFirst=true"`
-          |(NOTE: these options ONLY work in yarn cluster mode).
-          |
-          |If you encounter the error "object x is not a member of package x",
-          |please don't use an import; use the fully qualified name instead, like `scala.collection.mutable.Map[String, String]`.
-          |Sometimes inlining the reference works as well.
- |""".stripMargin)
- case _ => ETLLogger.warn(info.toString)
- }
- }
-
- def interactive(): Unit = ()
- },
- options = ""
- )
-
- private def doCompileTransformer(code: String): Any = {
- if (code.startsWith("package ")) {
-      throw DynamicCompileFailedException("`package` must not be used in a Scala script.")
- }
- toolbox.eval(toolbox.parse(
- s"""
- |$code
- |
- |${extractObjectName(code)}
- |""".stripMargin))
- }
-
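-  // e.g. for "object Foo extends Bar { ... }" this returns "Foo"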
- private def extractObjectName(code: String) = {
- val objectKeywordIndex = code.indexOf("object")
- val extendsKeywordIndex = code.indexOf("extends")
- code.slice(objectKeywordIndex + 6, extendsKeywordIndex).trim
- }
-
- val compileTransformer: Memo1[String, Any] = Memo1 { (code: String) => doCompileTransformer(code) }
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/StringUtil.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/StringUtil.scala
deleted file mode 100644
index f9bb8b8..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/StringUtil.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.util.Constants.Environment
-import com.github.sharpdata.sharpetl.core.exception.Exception.MissingConfigurationException
-
-import java.io.File
-import java.math.BigInteger
-import java.util.UUID
-
-object StringUtil {
- def assertNotEmpty(value: String, name: String): Unit = {
- if (isNullOrEmpty(value)) {
- throw new IllegalArgumentException(s"$name can not be null or empty")
- }
- }
-
- def environmentSuffix: String = {
- Option(Environment.CURRENT).getOrElse(Environment.LOCAL).toLowerCase match {
- case "" => ""
- case Environment.LOCAL => ""
- case Environment.DEV => s"-${Environment.DEV}"
- case Environment.QA => s"-${Environment.QA}"
- case Environment.PROD => s"-${Environment.PROD}"
- case Environment.TEST => s"-${Environment.TEST}"
- case _ => s"-${Environment.CURRENT}"
- }
- }
-
- val EMPTY: String = ""
-
- def canNotBeEmpty(name: String, value: String): (String, String) = {
- if (isNullOrEmpty(value)) {
- throw MissingConfigurationException(name)
- } else {
- (name, value)
- }
- }
-
- def getParentPath(path: String): String = {
- path.substring(0, path.lastIndexOf(File.separator))
- }
-
- def getFileNameFromPath(path: String): String = {
- path.substring(path.lastIndexOf(File.separator) + 1)
- }
-
- def concatFilePath(dir: String, fileName: String): String = {
- s"$dir${
- if (dir.endsWith(File.separator)) {
- ""
- } else {
- File.separator
- }
- }$fileName"
- }
-
- def getPrefix(prefix: String): String = {
- Option(prefix).filterNot(isNullOrEmpty).map(_.concat(".")).getOrElse("")
- }
-
- def uuid: String = {
- UUID.randomUUID().toString.replace("-", "")
- }
-
- def uuidName(): String = {
- UUID.randomUUID().toString.split('-').head
- }
-
- def uuidName(prefix: String, function: String): String = {
- prefix
- .replace("\"", "")
- .replace("`", "")
- .replace(" ", "_")
- .replace(".", "_").split("__")(0) + "__" + function + "__" + UUID.randomUUID().toString.split('-').head
- }
-
- def getTempName(prefix: String, function: String): String = {
- prefix
- .replace("\"", "")
- .replace("`", "")
- .replace(" ", "_")
- .replace(".", "_").split("__")(0) + "__" + function
- }
-
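-  // e.g. humpToUnderline("userName") == "user_name"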
- def humpToUnderline(hump: String): String = {
- val builder = new StringBuilder
- hump.foreach(ch => {
- if (ch.isUpper) {
- builder.append("_")
- }
- builder.append(ch.toLower)
- })
- builder.toString()
- }
-
- def isNullOrEmpty(x: String): Boolean = {
- x == null || x.isEmpty
- }
-
- def humpToUnderlineWithUpperCaseCheck(hump: String): String = {
- val builder = new StringBuilder
- var humpCheck = hump
- if (humpCheck == humpCheck.toUpperCase) {
- humpCheck = humpCheck.toLowerCase
- }
-
- var upperIndex = 0
- humpCheck.zipWithIndex.foreach(ch => {
- if (ch._1.isUpper && ch._2 != upperIndex + 1 && ch._2 != 0) {
- upperIndex = ch._2
- builder.append("_")
- }
- builder.append(ch._1.toLower)
- })
- builder.toString()
- }
-
- implicit class BigIntConverter(value: String) {
- def asBigInt: BigInteger = new BigInteger(value)
- }
-
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Try.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Try.scala
deleted file mode 100644
index 0aedfba..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/Try.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Private
-
-import scala.util.control.NonFatal
-
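-// A minimal Try variant: unlike scala.util.Try, Failure keeps the original
-// input value, and an additional Skipped state is supported.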
-@Private
-sealed trait Try[+T] extends Product with Serializable {
- def getOrElse[U >: T](default: => U): U
-
- def get: T
-
- def isSuccess(): Boolean
-
- def isFailure(): Boolean
-
- def isSkipped(): Boolean
-}
-
-@Private
-object Try {
- def apply[T](f: T => T, t: T): Try[T] =
- try Success(f(t)) catch {
- case NonFatal(e) => Failure(t, e)
- }
-}
-
-@Private
-final case class Success[+T](result: T) extends Try[T] {
- override def getOrElse[U >: T](default: => U): U = result
-
- override def isSuccess(): Boolean = true
-
- override def isFailure(): Boolean = false
-
- override def isSkipped(): Boolean = false
-
- override def get: T = result
-}
-
-@Private
-final case class Failure[+T](result: T, e: Throwable) extends Try[T] {
- override def getOrElse[U >: T](default: => U): U = default
-
- override def isSuccess(): Boolean = false
-
- override def isFailure(): Boolean = true
-
- override def isSkipped(): Boolean = false
-
- override def get: T = result
-}
-
-@Private
-final case class Skipped[+T](result: T) extends Try[T] {
- override def getOrElse[U >: T](default: => U): U = default
-
- override def isSuccess(): Boolean = false
-
- override def isFailure(): Boolean = false
-
- override def isSkipped(): Boolean = true
-
- override def get: T = result
-}
diff --git a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/WorkflowReader.scala b/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/WorkflowReader.scala
deleted file mode 100644
index e8918b8..0000000
--- a/core/src/main/scala/com/github/sharpdata/sharpetl/core/util/WorkflowReader.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-package com.github.sharpdata.sharpetl.core.util
-
-import com.github.sharpdata.sharpetl.core.util.Constants.PathPrefix
-import com.github.sharpdata.sharpetl.core.exception.Exception.{DuplicatedSqlScriptException, WorkFlowSyntaxException}
-import com.github.sharpdata.sharpetl.core.syntax._
-
-import java.io.{File, FileNotFoundException}
-
-object WorkflowReader {
-
- def readWorkflow(workflowName: String): Workflow = {
- val lines = readLines(workflowName)
- WorkflowParser.parseWorkflow(lines.mkString("\n")) match {
- case success: WFParseSuccess => success.wf
- case fail: WFParseFail =>
- throw WorkFlowSyntaxException(fail.toString)
- }
- }
-
- def readLines(workflowName: String): List[String] = {
- val configRootDir = ETLConfig.getProperty("etl.workflow.path")
- val pathPrefix = getPathPrefix(configRootDir)
- val (taskPathMapping, duplicatedFileNames) = readTaskPathMapping(pathPrefix, configRootDir)
- if (duplicatedFileNames.nonEmpty && duplicatedFileNames.keySet.contains(workflowName)) {
- throw DuplicatedSqlScriptException(
-        s"""There are multiple files with the same filename: $workflowName, paths: ${duplicatedFileNames(workflowName).mkString(",\n")}
-           |Please check your workflow folder and delete the duplicate files.""".stripMargin
- )
- }
- if (taskPathMapping.isDefinedAt(workflowName)) {
- val taskPath = taskPathMapping(workflowName)
- val lines = pathPrefix match {
- case PathPrefix.FILE =>
- IOUtil.readLinesFromText(taskPath)
- case PathPrefix.HDFS | PathPrefix.DBFS =>
- HDFSUtil.readLines(taskPath)
- case PathPrefix.OSS =>
- OssUtil.readLines(taskPath)
- }
- lines
- } else {
- throw new FileNotFoundException(s"Workflow or transformer '$workflowName.sql/.scala' not found.")
- }
- }
-
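-  // Returns the URI scheme of the root dir, e.g. "hdfs" for "hdfs:///tmp/wf";
-  // empty string when no scheme is present.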
- def getPathPrefix(configRootDir: String): String = {
- Option(configRootDir)
- .filter(_.indexOf(":") >= 0)
- .map(_.substring(0, configRootDir.indexOf(":")))
- .getOrElse("")
- }
-
- type MappingWithDuplicatedList = (Map[String, String], Map[String, Seq[String]])
-
- val readTaskPathMapping: Memo2[String, String, MappingWithDuplicatedList] =
- Memo2 { (pathPrefix: String, configRootDir: String) => doReadTaskPathMapping(pathPrefix, configRootDir) }
-
- private def doReadTaskPathMapping(pathPrefix: String, configRootDir: String): MappingWithDuplicatedList = {
- val fileNameToPath: Seq[(String, String)] = Seq(pathPrefix)
- .flatMap {
- case PathPrefix.FILE =>
- IOUtil.recursiveListFilesFromResource(configRootDir)
- case PathPrefix.HDFS | PathPrefix.DBFS =>
- HDFSUtil.recursiveListFiles(configRootDir)
- case PathPrefix.OSS =>
- OssUtil.recursiveListFiles(configRootDir)
- }
- .map(path => path.substring(path.lastIndexOf(File.separator) + 1, path.lastIndexOf(".")) -> path)
- (fileNameToPath.toMap, duplicatedFileNames(fileNameToPath))
- }
-
- private def duplicatedFileNames(fileNameToPath: Seq[(String, String)]) = {
- fileNameToPath.groupBy(_._1).filter(_._2.size > 1).map {
- case (fileName, list) => (fileName, list.map(_._2))
- }
- }
-}
diff --git a/core/src/test/resources/application.properties b/core/src/test/resources/application.properties
deleted file mode 100644
index 06e3dd3..0000000
--- a/core/src/test/resources/application.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-etl.workflow.path=tasks
-
-psi.postgres.driver=org.postgresql.Driver
-psi.postgres.fetchsize=10
-psi.postgres.url=jdbc:postgresql://localhost:5432/default?currentSchema="test"
-psi.postgres.user=root
-psi.postgres.password=root
-
-kudu.table.prefix=impala::
-
-
-notification.email.sender=sender@localhost
-notification.email.senderPersonalName=Sender
-notification.config.path=notification-test.yaml
diff --git a/core/src/test/resources/application.properties_bak b/core/src/test/resources/application.properties_bak
deleted file mode 100644
index e22f828..0000000
--- a/core/src/test/resources/application.properties_bak
+++ /dev/null
@@ -1,10 +0,0 @@
-etl.workflow.path=tasks
-
-psi.postgres.driver=org.postgresql.Driver
-psi.postgres.fetchsize=10
-psi.postgres.url=jdbc:postgresql://localhost:5432/default?currentSchema="test"
-psi.postgres.user=root
-psi.postgres.password=root
-
-kudu.table.prefix=impala::
-from_file_path=true
\ No newline at end of file
diff --git a/core/src/test/resources/application.properties_encrypted b/core/src/test/resources/application.properties_encrypted
deleted file mode 100644
index b00daca..0000000
--- a/core/src/test/resources/application.properties_encrypted
+++ /dev/null
@@ -1,6 +0,0 @@
-etl.workflow.path=tasks
-
-encrypt.algorithm=PBEWithMD5AndDES
-encrypt.password=cGFzc3dvcmQ=
-
-some.password=ENC(XVfx2J3gunlX5v+/6YEwpU5/cgKsSr3LNFYheL5Eg5aYh13BnWWE6g==)
\ No newline at end of file
diff --git a/core/src/test/resources/application.properties_hdfs b/core/src/test/resources/application.properties_hdfs
deleted file mode 100644
index fe5fc80..0000000
--- a/core/src/test/resources/application.properties_hdfs
+++ /dev/null
@@ -1,10 +0,0 @@
-etl.workflow.path=tasks
-
-psi.postgres.driver=org.postgresql.Driver
-psi.postgres.fetchsize=10
-psi.postgres.url=jdbc:postgresql://localhost:5432/default?currentSchema="test"
-psi.postgres.user=root
-psi.postgres.password=root
-
-kudu.table.prefix=impala::
-from_hdfs_path=true
\ No newline at end of file
diff --git a/core/src/test/resources/etl.key b/core/src/test/resources/etl.key
deleted file mode 100644
index aefe06c..0000000
Binary files a/core/src/test/resources/etl.key and /dev/null differ
diff --git a/core/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker b/core/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker
deleted file mode 100644
index ca6ee9c..0000000
--- a/core/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker
+++ /dev/null
@@ -1 +0,0 @@
-mock-maker-inline
\ No newline at end of file
diff --git a/core/src/test/scala/com/github/sharpdata/sharpetl/core/ETLConfigSpec.scala b/core/src/test/scala/com/github/sharpdata/sharpetl/core/ETLConfigSpec.scala
deleted file mode 100644
index e5b3e72..0000000
--- a/core/src/test/scala/com/github/sharpdata/sharpetl/core/ETLConfigSpec.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-package com.github.sharpdata.sharpetl.core
-
-import com.github.sharpdata.sharpetl.core.util.{Constants, ETLConfig, IOUtil, StringUtil}
-import com.github.sharpdata.sharpetl.core.util.Constants.Environment
-
-import java.io.{DataOutputStream, FileOutputStream}
-
-import org.apache.commons.codec.binary.Base64
-import org.scalatest.BeforeAndAfterEach
-import org.scalatest.funspec.AnyFunSpec
-
-class ETLConfigSpec extends AnyFunSpec with BeforeAndAfterEach {
-
- ignore("hdfs") {
- it("should read from hdfs") {
- Environment.CURRENT = Constants.Environment.LOCAL
- ETLConfig.setPropertyPath("hdfs:///tmp/application.properties")
- val res = ETLConfig.getProperty("from_hdfs_path")
- assert(!StringUtil.isNullOrEmpty(res))
- assert(res == "true")
- }
- }
-
- it("read from class path") {
- Constants.Environment.CURRENT = Constants.Environment.LOCAL
- val path = ETLConfig.getProperty("etl.workflow.path")
- assert(!StringUtil.isNullOrEmpty(path))
- }
-
- it("read from file path") {
- Constants.Environment.CURRENT = Constants.Environment.LOCAL
- val filePath = getClass.getResource("/application.properties_bak").toString
- ETLConfig.setPropertyPath(filePath)
- val res = ETLConfig.getProperty("from_file_path")
- assert(!StringUtil.isNullOrEmpty(res))
- assert("true" == res)
- }
-
- it("should encrypt password") {
- val filePath = getClass.getResource("/application.properties_encrypted").toString
- ETLConfig.setPropertyPath(filePath)
- val encryptor = ETLConfig.encryptor.get
- val password = "plain text password: 1qaz@WSX"
- val encryptedPassword = encryptor.encrypt(password)
- println(s"ENC($encryptedPassword)")
- assert(encryptor.decrypt(encryptedPassword) == password)
- }
-
- it("base64") {
- println(Base64.encodeBase64String("fMY$Vmbc#D3k".getBytes))
- }
-
- it("read from encrypted file") {
- val filePath = getClass.getResource("/application.properties_encrypted").toString
- ETLConfig.setPropertyPath(filePath)
- val res = ETLConfig.getProperty("some.password")
- assert(!StringUtil.isNullOrEmpty(res))
- assert("plain text password: 1qaz@WSX" == res)
- }
-
- it("write bytes to file and read it out") {
- val filePath = getClass.getResource("/application.properties_encrypted").toString
- ETLConfig.setPropertyPath(filePath)
- var path = this.getClass.getClassLoader.getResource("application.properties").getPath
- path = path.replace("application.properties", "my.key")
- print(path)
- val writer = new DataOutputStream(new FileOutputStream(path))
- val content = "XXXXX="
- val bytes = content.getBytes("UTF-8")
- writer.writeInt(bytes.length)
- bytes.reverse.foreach(x =>
- writer.writeInt(x + 10)
- )
- writer.flush()
- writer.close()
- val reader = IOUtil.getBytesDataReader(path)
- val len = reader.readInt()
- val bytesBuffer = new Array[Byte](len)
- for (index <- 0 until len) {
- bytesBuffer(index) = (reader.readInt() - 10).toByte
- }
- val contentFromFile = new String(bytesBuffer.reverse, "UTF-8")
- assert(content.equals(contentFromFile))
- }
-
- override protected def beforeEach(): Unit = {
- reinitializeETLConfig()
- super.beforeEach()
- }
-
-  // Trick: reinitialize the ETLConfig object so each test starts with fresh properties.
- private def reinitializeETLConfig() = {
- val cons = ETLConfig.getClass.getDeclaredConstructor()
- cons.setAccessible(true)
- cons.newInstance()
- }
-}
diff --git a/core/src/test/scala/com/github/sharpdata/sharpetl/core/FormatableSpec.scala b/core/src/test/scala/com/github/sharpdata/sharpetl/core/FormatableSpec.scala
deleted file mode 100644
index b4b1807..0000000
--- a/core/src/test/scala/com/github/sharpdata/sharpetl/core/FormatableSpec.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package com.github.sharpdata.sharpetl.core
-
-import com.github.sharpdata.sharpetl.core.datasource.config._
-import com.github.sharpdata.sharpetl.core.syntax.Formatable
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-import scala.beans.BeanProperty
-
-class FormatableSpec extends AnyFlatSpec with should.Matchers {
-
- it should "format config content" in {
- class JobConfig extends Formatable {
- @BeanProperty var inputConfig = new TextFileDataSourceConfig
- @BeanProperty var outputConfig = new CSVDataSourceConfig
- }
-
- val config = new JobConfig()
- config.toString should be(
- """-- inputConfig
- |-- encoding=UTF-8
- |-- codecExtension=
- |-- decompress=false
- |-- strictColumnNum=false
- |-- fileNamePattern=.*
- |-- deleteSource=false
- |-- outputConfig
- |-- inferSchema=true
- |-- encoding=UTF-8
- |-- sep=,
- |-- header=true
- |-- quote="
- |-- escape="
- |-- multiLine=false
- |-- ignoreTrailingWhiteSpace=false
- |-- selectExpr=*
- |-- parseTimeFromFileNameRegex=
- |-- parseTimeFormatPattern=
- |-- parseTimeColumnName=parsedTime
- |-- fileNamePattern=.*
- |-- deleteSource=false""".stripMargin)
-
- }
-}
diff --git a/core/src/test/scala/com/github/sharpdata/sharpetl/core/LogDrivenInterpreterSpec.scala b/core/src/test/scala/com/github/sharpdata/sharpetl/core/LogDrivenInterpreterSpec.scala
deleted file mode 100644
index 758c136..0000000
--- a/core/src/test/scala/com/github/sharpdata/sharpetl/core/LogDrivenInterpreterSpec.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-package com.github.sharpdata.sharpetl.core
-
-import com.github.sharpdata.sharpetl.core.api.LogDrivenInterpreter
-import com.github.sharpdata.sharpetl.core.cli.SingleJobCommand
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.repository.model.JobStatus._
-import com.github.sharpdata.sharpetl.core.syntax.Workflow
-import com.github.sharpdata.sharpetl.core.test.FakeWorkflowInterpreter
-import com.github.sharpdata.sharpetl.core.util.Constants.IncrementalType
-import com.github.sharpdata.sharpetl.core.util.Constants.Job.nullDataTime
-import com.github.sharpdata.sharpetl.core.util.DateUtil.LocalDateTimeToBigInt
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{BigIntConverter, uuid}
-import org.mockito.ArgumentMatchers.anyString
-import org.mockito.MockitoSugar.{mock, when}
-import org.scalatest.flatspec._
-import org.scalatest.matchers._
-import org.scalatest.prop.TableDrivenPropertyChecks._
-
-import java.time.LocalDateTime
-import java.time.temporal.ChronoUnit
-
-class TestJobCommand() extends SingleJobCommand() {
- override def run(): Unit = ()
-}
-
-class LogDrivenInterpreterSpec extends AnyFlatSpec with should.Matchers {
- private val now: LocalDateTime = LocalDateTime.now()
-
- private val execPeriod =
- Table(
- "timeUnit",
- 60 * 24 * 30,
- 60 * 24,
- 60,
- 1
- )
-
- forAll(execPeriod) { period =>
-
- it should s"not schedule when last run 1 sec ago: $period" in {
- val prevDataEndTime = now.minus(1L * period, ChronoUnit.SECONDS)
- val logDrivenJob: LogDrivenInterpreter = setup(prevDataEndTime, period)
- val unexecutedQueue = logDrivenJob.logDrivenPlan()
- unexecutedQueue.isEmpty should be(true)
- }
-
- it should s"schedule 1 job: $period" in {
- val prevDataEndTime = now.minus(1L * period, ChronoUnit.MINUTES)
- val logDrivenJob: LogDrivenInterpreter = setup(prevDataEndTime, period)
- val unexecutedQueue = logDrivenJob.logDrivenPlan()
- unexecutedQueue.size should be(1)
- unexecutedQueue.head.dataRangeStart.asBigInt should be(prevDataEndTime.asBigInt())
- unexecutedQueue.head.dataRangeEnd.asBigInt should be(prevDataEndTime.plus(1L * period, ChronoUnit.MINUTES).asBigInt())
-
- beforeOrEqual(unexecutedQueue.head.dataRangeEnd, now.asBigInt()) should be(true)
- }
-
-
- it should s"schedule 1 job when last job run 1 time unit and 1 secs ago: $period" in {
-
- val prevDataEndTime = now.minus(1 * period, ChronoUnit.MINUTES).minus(1, ChronoUnit.SECONDS)
- val logDrivenJob: LogDrivenInterpreter = setup(prevDataEndTime, period)
- val unexecutedQueue = logDrivenJob.logDrivenPlan()
- unexecutedQueue.size should be(1)
- unexecutedQueue.head.dataRangeStart.asBigInt should be(prevDataEndTime.asBigInt())
- unexecutedQueue.head.dataRangeEnd.asBigInt should be(prevDataEndTime.plus(1L * period, ChronoUnit.MINUTES).asBigInt())
-
- beforeOrEqual(unexecutedQueue.head.dataRangeEnd, now.asBigInt()) should be(true)
- }
-
-
- it should s"schedule 2 job when last job run 2 time unit ago: $period" in {
- val prevDataEndTime = now.minus(2 * period, ChronoUnit.MINUTES)
- val logDrivenJob: LogDrivenInterpreter = setup(prevDataEndTime, period)
- val unexecutedQueue = logDrivenJob.logDrivenPlan()
- unexecutedQueue.size should be(2)
- unexecutedQueue.head.dataRangeStart.asBigInt should be(prevDataEndTime.asBigInt())
- unexecutedQueue.head.dataRangeEnd.asBigInt should be(prevDataEndTime.plus(1L * period, ChronoUnit.MINUTES).asBigInt())
- unexecutedQueue.tail.head.dataRangeStart.asBigInt should be(prevDataEndTime.plus(1L * period, ChronoUnit.MINUTES).asBigInt())
- unexecutedQueue.tail.head.dataRangeEnd.asBigInt should be(prevDataEndTime.plus(2L * period, ChronoUnit.MINUTES).asBigInt())
-
- beforeOrEqual(unexecutedQueue.tail.head.dataRangeEnd, now.asBigInt()) should be(true)
- }
-
- }
-
- private def beforeOrEqual(end: LocalDateTime, now: LocalDateTime) = {
- end.compareTo(now) <= 0
- }
-
- private def beforeOrEqual(end: BigInt, now: BigInt) = {
- end - now <= 0
- }
-
- private def beforeOrEqual(end: String, now: BigInt) = {
- new BigInt(end.asBigInt) - now <= 0
- }
-
- private def setup(prevDataEndTime: LocalDateTime, execPeriod: Int) = {
- val jobLogAccessor = mock[JobLogAccessor]
- mockJobLogAccessor(jobLogAccessor, prevDataEndTime, 24 * 60)
- val command = new TestJobCommand()
- command.once = true
- val logDrivenJob = LogDrivenInterpreter(
- Workflow("workflowName", execPeriod.toString, "incremental", "timewindow", null, null, null, -1, null, false, null, Map(), Nil), // scalastyle:off
- new FakeWorkflowInterpreter(),
- jobLogAccessor = jobLogAccessor,
- command = command
- )
- logDrivenJob
- }
-
- private def mockJobLogAccessor(jobLogAccessor: JobLogAccessor, prevDataEndTime: LocalDateTime, execPeriod: Int): Any = {
- when(jobLogAccessor.lastSuccessExecuted("workflowName")).thenReturn(
- new JobLog(
- jobId = uuid, workflowName = "workflowName",
- period = execPeriod, jobName = "workflowName",
- dataRangeStart = "0", dataRangeEnd = prevDataEndTime.asBigInt().toString,
- jobStartTime = nullDataTime, jobEndTime = nullDataTime,
- status = RUNNING, createTime = now,
- lastUpdateTime = now,
- "",
-        IncrementalType.TIMEWINDOW, "", "fake-app-001", "project", ""
- )
- )
- when(jobLogAccessor.isAnotherJobRunning(anyString())).thenReturn(null)
- }
-}
diff --git a/core/src/test/scala/com/github/sharpdata/sharpetl/core/ScalaScriptCompilerSpec.scala b/core/src/test/scala/com/github/sharpdata/sharpetl/core/ScalaScriptCompilerSpec.scala
deleted file mode 100644
index 24002c8..0000000
--- a/core/src/test/scala/com/github/sharpdata/sharpetl/core/ScalaScriptCompilerSpec.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.github.sharpdata.sharpetl.core
-
-import com.github.sharpdata.sharpetl.core.util.ScalaScriptCompiler
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-trait TestTrait {
- def apply(args: Map[String, String]): String
-}
-
-class ScalaScriptCompilerSpec extends AnyFlatSpec with should.Matchers {
- it should s"compile object scala script" in {
- ScalaScriptCompiler.compileTransformer(
- """
- |object TestObject extends com.github.sharpdata.sharpetl.core.TestTrait{
- | def apply(args: Map[String, String]): String = {
- | args.values.reduce(_+_)
- | }
- |}
- |""".stripMargin).asInstanceOf[TestTrait](Map("a" -> "A", "b" -> "B")) should be("AB")
- }
-
-
- it should s"compile object scala script with define" in {
- ScalaScriptCompiler.compileTransformer(
- """
- |object TestObject extends com.github.sharpdata.sharpetl.core.TestTrait{
- | def apply(args: Map[String, String]): String = {
- | args.values.reduce((left, right) => ABC(left).toString + ABC(right).toString)
- | }
- |}
- |
- |final case class ABC(value: String)
- |""".stripMargin).asInstanceOf[TestTrait](Map("a" -> "A", "b" -> "B")) should be("ABC(A)ABC(B)")
- }
-}
diff --git a/core/src/test/scala/com/github/sharpdata/sharpetl/core/notification/NotificationUtilTest.scala b/core/src/test/scala/com/github/sharpdata/sharpetl/core/notification/NotificationUtilTest.scala
deleted file mode 100644
index d37bbb1..0000000
--- a/core/src/test/scala/com/github/sharpdata/sharpetl/core/notification/NotificationUtilTest.scala
+++ /dev/null
@@ -1,184 +0,0 @@
-package com.github.sharpdata.sharpetl.core.notification
-
-import com.github.sharpdata.sharpetl.core.api.WfEvalResult
-import com.github.sharpdata.sharpetl.core.notification.sender.NotificationFactory
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.{JobLog, JobStatus, StepLog}
-import com.github.sharpdata.sharpetl.core.syntax.{Notify, Workflow}
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, Failure, Success}
-import org.mockito.ArgumentMatchers.any
-import org.mockito.Mockito.{mock, times, verify, when}
-import org.mockito.MockitoSugar.withObjectMocked
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-import java.time.LocalDateTime
-
-class NotificationUtilTest extends AnyFlatSpec with should.Matchers {
-
- it should "send notification correctly when config notification setting" in {
-
- withObjectMocked[NotificationFactory.type] {
- val path = getClass.getResource("/application.properties").toString
- ETLConfig.setPropertyPath(path)
- val jobLogAccessor = mock(classOf[JobLogAccessor])
- val service = new NotificationUtil(jobLogAccessor)
-
- val job1 = mockJobLog("job1", "1", JobStatus.FAILURE)
- job1.setStepLogs(Array(mockStepLog("1", "1", JobStatus.FAILURE)))
-
- val job2 = mockJobLog("job2", "2", JobStatus.FAILURE)
- job2.setStepLogs(Array(mockStepLog("2", "1", JobStatus.SUCCESS), mockStepLog("2", "2", JobStatus.FAILURE)))
-
- val wf1 = Workflow("job1", "1440", "full", "timewindow", null, null, null, -1, null, false,
- Seq(Notify("email", "zhangsan@gmail.com", NotifyTriggerCondition.ALWAYS)), Map(), List())
-
- val wf2 = Workflow("job2", "1440", "full", "timewindow", null, null, null, -1, null, false,
- Seq(Notify("email", "lisi@gmail.com", NotifyTriggerCondition.ALWAYS)), Map(), List())
-
- service.notify(Seq(
- WfEvalResult(wf1, Seq(Failure(job1, new RuntimeException("???")))),
- WfEvalResult(wf2, Seq(Failure(job2, new RuntimeException("???"))))
- ))
- verify(NotificationFactory, times(2)).sendNotification(any())
- }
- }
-
-
- it should "merge notification correctly when recipient is the same" in {
-
- withObjectMocked[NotificationFactory.type] {
- val path = getClass.getResource("/application.properties").toString
- ETLConfig.setPropertyPath(path)
- val jobLogAccessor = mock(classOf[JobLogAccessor])
- val service = new NotificationUtil(jobLogAccessor)
-
- val job1 = mockJobLog("job1", "1", JobStatus.FAILURE)
- job1.setStepLogs(Array(mockStepLog("1", "1", JobStatus.FAILURE)))
-
- val job2 = mockJobLog("job2", "2", JobStatus.FAILURE)
- job2.setStepLogs(Array(mockStepLog("2", "1", JobStatus.SUCCESS), mockStepLog("2", "2", JobStatus.FAILURE)))
-
- val wf1 = Workflow("job1", "1440", "full", "timewindow", null, null, null, -1, null, false,
- Seq(Notify("email", "zhangsan@gmail.com", NotifyTriggerCondition.ALWAYS)), Map(), List())
-
- val wf2 = Workflow("job2", "1440", "full", "timewindow", null, null, null, -1, null, false,
- Seq(Notify("email", "zhangsan@gmail.com", NotifyTriggerCondition.ALWAYS)), Map(), List())
-
- service.notify(Seq(
- WfEvalResult(wf1, Seq(Failure(job1, new RuntimeException("???")))),
- WfEvalResult(wf2, Seq(Failure(job2, new RuntimeException("???"))))
- ))
- verify(NotificationFactory, times(1)).sendNotification(any())
- }
- }
-
- it should "send notification correctly when no previous executed jobLog" in {
-
- withObjectMocked[NotificationFactory.type] {
- val path = getClass.getResource("/application.properties").toString
- ETLConfig.setPropertyPath(path)
- val jobLogAccessor = mock(classOf[JobLogAccessor])
- val service = new NotificationUtil(jobLogAccessor)
-
- val jobLog = mockJobLog("job2", "2", JobStatus.FAILURE)
- jobLog.setStepLogs(Array(mockStepLog("2", "1", JobStatus.FAILURE)))
-
- val tempWf = Workflow("test", "1440", "full", "timewindow", null, null, null, -1, null, false,
- Seq(Notify("email", "zhangsan@gmail.com", NotifyTriggerCondition.ALWAYS)), Map(), List())
-
- service.notify(Seq(WfEvalResult(tempWf, Seq(Success(jobLog)))))
- verify(NotificationFactory, times(1)).sendNotification(any())
- }
-
- }
-
- it should "send notification correctly when trigger condition is failure and last executed success" in {
-
- withObjectMocked[NotificationFactory.type] {
- val path = getClass.getResource("/application.properties").toString
- ETLConfig.setPropertyPath(path)
- val jobLogAccessor = mock(classOf[JobLogAccessor])
- val service = new NotificationUtil(jobLogAccessor)
-
- val jobLog = mockJobLog("job2", "2", JobStatus.FAILURE)
- jobLog.setStepLogs(Array(mockStepLog("2", "1", JobStatus.FAILURE)))
- val previousJobLog = mockJobLog("job2", "1", JobStatus.SUCCESS)
-
- when(jobLogAccessor.getPreviousJobLog(jobLog))
- .thenReturn(previousJobLog)
-
- val wf = Workflow("test", "1440", "full", "timewindow", null, null, null, -1, null, false,
- Seq(Notify("email", "zhangsan@gmail.com", NotifyTriggerCondition.FAILURE)), Map(), List())
-
- service.notify(Seq(WfEvalResult(wf, Seq(Success(jobLog)))))
- verify(NotificationFactory, times(1)).sendNotification(any())
- }
-
- }
-
- it should "send notification correctly when trigger condition is failure and last executed failed " in {
-
- withObjectMocked[NotificationFactory.type] {
- val path = getClass.getResource("/application.properties").toString
- ETLConfig.setPropertyPath(path)
- val jobLogAccessor = mock(classOf[JobLogAccessor])
- val service = new NotificationUtil(jobLogAccessor)
-
- val jobLog = mockJobLog("job2", "2", JobStatus.FAILURE)
- jobLog.setStepLogs(Array(mockStepLog("2", "1", JobStatus.FAILURE)))
- val previousJobLog = mockJobLog("job2", "1", JobStatus.FAILURE)
-
- when(jobLogAccessor.getPreviousJobLog(jobLog))
- .thenReturn(previousJobLog)
-
- val wf = Workflow("test", "1440", "full", "timewindow", null, null, null, -1, null, false,
- Seq(Notify("email", "zhangsan@gmail.com", NotifyTriggerCondition.FAILURE)), Map(), List())
-
- service.notify(Seq(WfEvalResult(wf, Seq(Success(jobLog)))))
- verify(NotificationFactory, times(0)).sendNotification(any())
- }
-
- }
-
- private def mockJobLog(wfName: String, jobId: String, status: String): JobLog = {
- new JobLog(
- jobId = jobId,
- workflowName = wfName,
- period = 1440,
- jobName = "20221111",
- dataRangeEnd = "20211212000000",
- dataRangeStart = "20211211000000",
- jobStartTime = LocalDateTime.now(),
- jobEndTime = LocalDateTime.now(),
- status = status,
- createTime = LocalDateTime.now(),
- lastUpdateTime = LocalDateTime.now(),
- logDrivenType = "",
- file = "",
- applicationId = "fake-app-001",
- projectName = "",
- loadType = "",
- runtimeArgs = ""
- )
- }
-
- private def mockStepLog(jobId: String, stepId: String, status: String): StepLog = {
- new StepLog(
- jobId = jobId,
- stepId = stepId,
- status = status,
- startTime = LocalDateTime.now(),
- endTime = LocalDateTime.now(),
- duration = 10,
- output = "",
- error = "",
- successCount = 10,
- sourceCount = 10,
- targetCount = 10,
- failureCount = 10,
- sourceType = "",
- targetType = ""
- )
- }
-}
diff --git a/core/src/test/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowParserSpec.scala b/core/src/test/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowParserSpec.scala
deleted file mode 100644
index 193df6b..0000000
--- a/core/src/test/scala/com/github/sharpdata/sharpetl/core/syntax/WorkflowParserSpec.scala
+++ /dev/null
@@ -1,813 +0,0 @@
-package com.github.sharpdata.sharpetl.core.syntax
-
-import com.github.sharpdata.sharpetl.core.datasource.config._
-import fastparse._
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowParser._
-import org.scalatest.funspec.AnyFunSpec
-import org.scalatest.matchers.should
-
-class WorkflowParserSpec extends AnyFunSpec with should.Matchers {
- it("parse invalid workflow without step to error") {
- val text =
- """
- |-- workflow=abc
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |-- dependsOn=dim_user,dim_price
- |-- comment=this ETL script is working for xxx
- |-- timeout=3600
- |-- defaultStart=20220101
- |-- stopScheduleWhenFail=false
- |-- options
- |-- project=abc
- |-- notify
- |-- notifyType=email
- |-- recipients=a@q.com,b@q.com
- |-- notifyCondition=SUCCESS/FAILURE/ALWAYS
- |""".stripMargin
-
- val r = parseWorkflow(text)
- r.asInstanceOf[WFParseFail].toString should be(
- """18:1: error: Expected parse by `stepHeader` at 18:1, but found "".
- |Parse stack is workflow:1:1 / step:18:1 / stepHeader:18:1
- |18|-- notifyCondition=SUCCESS/FAILURE/ALWAYS
- | |^""".stripMargin)
- }
-
- it("parse invalid workflow to error") {
- val text =
- """
- |-- workflow=abc
- |-- period=1440
- |-- loadType
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |-- dependsOn=dim_user,dim_price
- |-- comment=this ETL script is working for xxx
- |-- timeout=3600
- |-- defaultStart=20220101
- |-- stopScheduleWhenFail=false
- |""".stripMargin
-
- val r = parseWorkflow(text)
- //println(r.asInstanceOf[WFParseFail].toString)
- r.asInstanceOf[WFParseFail].toString should be(
- """4:1: error: Expected parse by `stepHeader` at 4:1, but found "-- loadTy".
- |Parse stack is workflow:1:1 / step:4:1 / stepHeader:4:1
- |4|-- loadType
- | |^""".stripMargin)
- }
-
- it("parse sample workflow") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |-- dependsOn=dim_user,dim_price
- |-- comment=this ETL script is working for xxx
- |-- timeout=3600
- |-- defaultStart=20220101
- |-- stopScheduleWhenFail=false
- |-- options
- |-- project=abc
- |-- notify
- |-- notifyType=email
- |-- recipients=a@q.com,b@q.com
- |-- notifyCondition=SUCCESS/FAILURE/ALWAYS
- |
- |-- step=read csv
- |-- source=csv
- |-- target=csv
- |
- |""".stripMargin
-
- val header = parse(text, workflow(_))
- header.isSuccess should be(true)
- val wf = header.get.value
- wf.name should be("same_with_file_name")
- wf.options.size should be(1)
- wf.notifies.head.recipients should be("a@q.com,b@q.com")
- wf.timeout should be(3600)
- }
-
- it("parse simple workflow") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |-- dependsOn=dim_user,dim_price
- |-- defaultStart=20220101
- |-- notify
- |-- notifyType=email
- |-- recipients=a@q.com,b@q.com
- |-- notifyCondition=SUCCESS/FAILURE/ALWAYS
- |-- notify
- |-- notifyType=email
- |-- recipients=c@q.com,d@q.com
- |-- notifyCondition=ALWAYS
- |
- |-- step=read csv
- |-- source=csv
- |-- target=csv
- |
- |""".stripMargin
-
- val header = parseWorkflow(text)
- //println(header.asInstanceOf[WFParseFail].toString)
- header.isSuccess should be(true)
- val wf = header.get
- wf.name should be("same_with_file_name")
- wf.options should be(Map())
- wf.timeout should be(0)
- wf.notifies.size should be(2)
- wf.notifies.head.recipients should be("a@q.com,b@q.com")
- wf.notifies.reverse.head.recipients should be("c@q.com,d@q.com")
- }
-
- it("parse sample un-ordered workflow") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |
- |-- dependsOn=dim_user,dim_price
- |-- comment=this ETL script is working for xxx
- |-- timeout=3600
- |
- |-- defaultStart=20220101
- |-- stopScheduleWhenFail=false
- |-- options
- |-- project=abc
- |
- |-- notify
- |-- notifyType=email
- |-- recipients=a@q.com,b@q.com
- |-- notifyCondition=SUCCESS/FAILURE/ALWAYS
- |
- |
- |-- step=read csv
- |-- source=csv
- |-- target=csv
- |select 'a' as result;
- |
- |-- step=read csv
- |
- |-- source=csv
- |
- |-- target=csv
- |
- |WITH location AS (
- | SELECT DISTINCT code
- | FROM table1
- | WHERE id = (1,2,3)
- | AND is_active = 1
- |)
- |select * from location;
- |
- |""".stripMargin
-
- val header = parse(text, workflow(_), verboseFailures = true)
- //print(header.asInstanceOf[Parsed.Failure].trace())
- header.isSuccess should be(true)
- val wf = header.get.value
- wf.name should be("same_with_file_name")
- wf.options.size should be(1)
- wf.notifies.head.recipients should be("a@q.com,b@q.com")
- wf.timeout should be(3600)
- wf.steps.size should be(2)
- wf.steps(0).sqlTemplate should be("select 'a' as result")
- wf.steps(1).sqlTemplate should be(
- """WITH location AS (
- | SELECT DISTINCT code
- | FROM table1
- | WHERE id = (1,2,3)
- | AND is_active = 1
- |)
- |select * from location""".stripMargin)
- }
-
-
- it("parse sample step") {
- val text =
- """-- step=read csv
- |-- source=csv
- |-- fileDir=hdfs:///data/test/
- |-- filePaths=hdfs:///data/test/USER_${daily_postfix}.txt
- |-- sep=|
- |-- inferSchema=false
- |-- quote='
- |-- target=csv
- |-- fileDir=hdfs:///data/test/
- |-- filePaths=hdfs:///data/test/USER_${daily_postfix}.txt
- |-- sep=|
- |-- inferSchema=false
- |-- quote='
- |-- writeMode=overwrite
- |""".stripMargin
-
- val header = parse(text, step(_))
- header.isSuccess should be(true)
- val s = header.get.value
- s.sqlTemplate should be("")
- s.writeMode should be("overwrite")
- }
-
-
- it("parse complex step") {
- val text =
- """-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |
- |-- step=1
- |-- source=temp
- |
- |-- target=variables
- |select id, name,
- |
- |-- \\\\ -- this is another comment date=lates
- | some_table;
- |
- |
- |
- |-- step=2
- |-- source=hive
- |-- target=temp
- |-- tableName=ads_si_view
- |-- throwExceptionIfEmpty=true
- |
- |-- \\\\ -- this is step2 sql
- |WITH location AS (
- | SELECT DISTINCT code
- | FROM table1
- | WHERE id = (1,2,3)
- | AND is_active = 1
- |)
- |
- |-- \\\\ -- this is step2 select sql
- |
- |SELECT id,name,location
- |FROM table2
- | LEFT OUTER JOIN supplier ON si.supplier_code = supplier.supplier_code
- | LEFT OUTER JOIN cust ON si.cust_code = cust.oper_unit_code
- | LEFT OUTER JOIN product ON SI.materiel_code = product.materiel_code
- |;
- |
- |""".stripMargin
-
- val header = parse(text, workflow(_))
- header.isSuccess should be(true)
- val wf = header.get.value
- wf.steps(0).sqlTemplate should be(
- """select id, name,
- |
- |-- \\\\ -- this is another comment date=lates
- | some_table""".stripMargin)
- wf.steps(1).sqlTemplate should be(
- """-- \\\\ -- this is step2 sql
- |WITH location AS (
- | SELECT DISTINCT code
- | FROM table1
- | WHERE id = (1,2,3)
- | AND is_active = 1
- |)
- |
- |-- \\\\ -- this is step2 select sql
- |
- |SELECT id,name,location
- |FROM table2
- | LEFT OUTER JOIN supplier ON si.supplier_code = supplier.supplier_code
- | LEFT OUTER JOIN cust ON si.cust_code = cust.oper_unit_code
- | LEFT OUTER JOIN product ON SI.materiel_code = product.materiel_code
- |""".stripMargin)
-
- }
-
- it("parse options and conf") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |
- |
- |-- step=1
- |-- source=temp
- |-- options
- |-- delimiter='
- |-- header=true
- |-- target=variables
- |-- options
- |-- delimiter="
- |-- header=false
- |-- conf
- |-- spark.sql.shuffle.partitions=1
- |select id, name,
- |
- |-- \\\\ -- this is another comment date=lates
- | some_table;""".stripMargin
-
- val header = parse(text, workflow(_), verboseFailures = true)
- //print(header.asInstanceOf[Parsed.Failure].trace())
- header.isSuccess should be(true)
- val wf = header.get.value
- val steps = wf.steps
-
- assert(steps.length == 1)
-
- assert(steps(0).conf("spark.sql.shuffle.partitions") == "1")
-
- assert(steps(0).source.dataSourceType == "temp")
- assert(steps(0).source.options.size == 2)
- assert(steps(0).source.options("delimiter") == "'")
- assert(steps(0).source.options("header") == "true")
-
- assert(steps(0).target.dataSourceType == "variables")
- assert(steps(0).target.options.size == 2)
- assert(steps(0).target.options("delimiter") == "\"")
- assert(steps(0).target.options("header") == "false")
- }
-
- it("parse es options") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |
- |
- |-- step=1
- |-- source=bigquery
- |-- options
- |-- delimiter='
- |-- queryTimeout=5
- |-- target=es
- |-- tableName=customer_label
- |-- primaryKeys=cifsn
- |-- options
- |-- es.mapping.parent=a=1;b=2;c=3
- |select id, name,
- |
- |-- \\\\ -- this is another comment date=lates
- | some_table;""".stripMargin
-
- val header = parse(text, workflow(_), verboseFailures = true)
- //print(header.asInstanceOf[Parsed.Failure].trace())
- header.isSuccess should be(true)
- val wf = header.get.value
- val steps = wf.steps
-
- assert(steps.length == 1)
-
- val source = steps(0).source.asInstanceOf[BigQueryDataSourceConfig]
- assert(source.dataSourceType == "bigquery")
- assert(source.options.size == 2)
- assert(source.options("delimiter") == "'")
- assert(source.options("queryTimeout") == "5")
-
- val target = steps(0).target.asInstanceOf[DBDataSourceConfig]
- assert(target.dataSourceType == "es")
- assert(target.getPrimaryKeys == "cifsn")
- assert(target.options.size == 1)
- assert(target.options("es.mapping.parent") == "a=1;b=2;c=3")
- }
-
- it("parse steps without sql") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |
- |
- |-- step=1
- |-- source=compresstar
- |-- encoding=utf-8
- |-- targetPath=/out/
- |-- tarPath=/out/((\w*.tar.gz))
- |-- tmpPath=/out/tmp/
- |-- bakPath=/out/bak/
- |-- fileNamePattern=\d{1}.txt
- |-- options
- |-- es.mapping.parent=a=1;b=2;c=3
- |-- target=do_nothing
- |-- options
- |-- options.parent=a=1;b=2;c=3
- |
- |-- step=2
- |-- source=csv
- |-- encoding=utf-8
- |-- inferSchema=true
- |-- sep=\t
- |-- header=false
- |-- fileNamePattern=\w*_1.txt
- |-- selectExpr=_c0 as num
- |-- fileDir=/out/
- |-- target=do_nothing
- |
- |-- step=3
- |-- source=csv
- |-- encoding=utf-8
- |-- inferSchema=true
- |-- sep=\t
- |-- header=false
- |-- fileNamePattern=\w*_1.txt
- |-- selectExpr=_c0 as num
- |-- fileDir=/out/
- |-- target=do_nothing
- |""".stripMargin
-
- val header = parse(text, workflow(_), verboseFailures = true)
- header match {
- case Parsed.Success(_, _) => ()
- case failure: Parsed.Failure => print(failure.trace())
- }
- header.isSuccess should be(true)
- val wf = header.get.value
- val steps = wf.steps
-
- assert(steps.length == 3)
-
- val source1 = steps(0).source.asInstanceOf[CompressTarConfig]
- assert(source1.dataSourceType == "compresstar")
- assert(source1.options.size == 1)
- assert(source1.options("es.mapping.parent") == "a=1;b=2;c=3")
-
- val target1 = steps(0).target.asInstanceOf[DBDataSourceConfig]
- assert(target1.dataSourceType == "do_nothing")
- assert(target1.options.size == 1)
- assert(target1.options("options.parent") == "a=1;b=2;c=3")
-
-
- val source2 = steps(1).source.asInstanceOf[CSVDataSourceConfig]
- assert(source2.dataSourceType == "csv")
- assert(source2.options.size == 0)
-
- val target2 = steps(1).target.asInstanceOf[DBDataSourceConfig]
- assert(target2.dataSourceType == "do_nothing")
- assert(target2.options.size == 0)
-
- val source3 = steps(2).source.asInstanceOf[CSVDataSourceConfig]
- assert(source3.dataSourceType == "csv")
- assert(source3.options.size == 0)
-
- val target3 = steps(2).target.asInstanceOf[DBDataSourceConfig]
- assert(target3.dataSourceType == "do_nothing")
- assert(target3.options.size == 0)
- }
-
-
- it("parse transformer args") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |
- |
- |-- step=5
- |-- source=transformation
- |-- className=com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer
- |-- methodName=transform
- |-- dwDataLoadType=full
- |-- dwViewName=`4e9e6a00`
- |-- odsViewName=`e7fb019e`
- |-- primaryFields=id
- |-- sortFields=id
- |-- transformerType=object
- |-- target=hive
- |-- dbName=default
- |-- tableName=test_cust
- |-- writeMode=overwrite
- |""".stripMargin
-
- val header = parse(text, workflow(_), verboseFailures = true)
- header match {
- case Parsed.Success(_, _) => ()
- case failure: Parsed.Failure => print(failure.trace())
- }
- header.isSuccess should be(true)
- val wf = header.get.value
- val steps = wf.steps
-
- assert(steps.length == 1)
-
- val source1 = steps(0).source.asInstanceOf[TransformationDataSourceConfig]
- assert(source1.dataSourceType == "transformation")
- assert(source1.methodName == "transform")
- assert(source1.transformerType == "object")
- assert(source1.args.size == 5)
- assert(source1.args("dwDataLoadType") == "full")
- assert(source1.args("dwViewName") == "`4e9e6a00`")
- assert(source1.args("odsViewName") == "`e7fb019e`")
- assert(source1.args("primaryFields") == "id")
- assert(source1.args("sortFields") == "id")
- assert(source1.options.size == 0)
-
- val target1 = steps(0).target.asInstanceOf[DBDataSourceConfig]
- target1.tableName should be("test_cust")
- target1.dbName should be("default")
-
- steps.head.writeMode should be("overwrite")
- }
-
- it("parse variable data source config") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |
- |
- |-- step=generate variables(duplicate check)
- |-- source=postgres
- |-- connectionName=my_postgres
- |-- dbName=pg_db
- |-- tableName=my_table
- |-- target=variables
- |select from_unixtime(unix_timestamp('2022-06-20 10:27:00', 'yyyy-MM-dd HH:mm:ss'), 'yyyy') as `YEAR`,
- | from_unixtime(unix_timestamp('2022-06-20 10:27:00', 'yyyy-MM-dd HH:mm:ss'), 'MM') as `MONTH`,
- | from_unixtime(unix_timestamp('2022-06-20 10:27:00', 'yyyy-MM-dd HH:mm:ss'), 'dd') as `DAY`,
- | from_unixtime(unix_timestamp('2022-06-20 10:27:00', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR`;
- |
- |""".stripMargin
-
- val header = parse(text, workflow(_), verboseFailures = true)
- header match {
- case Parsed.Success(_, _) => ()
- case failure: Parsed.Failure => print(failure.trace())
- }
- header.isSuccess should be(true)
- val wf = header.get.value
- val steps = wf.steps
-
- assert(steps.length == 1)
-
- val source1 = steps(0).source.asInstanceOf[DBDataSourceConfig]
- assert(source1.dataSourceType == "postgres")
- assert(source1.connectionName == "my_postgres")
- assert(source1.dbName == "pg_db")
- assert(source1.tableName == "my_table")
-
- steps(0).target.asInstanceOf[VariableDataSourceConfig]
- }
-
- it("parse multi-line pair values") {
- val text =
- """-- connectionName=|
- |-- |Cats is a library which provides abstractions for functional programming in the Scala programming language.
- |-- |
- |-- |Scala supports both object-oriented and functional programming,
- |-- |and this is reflected in the hybrid approach of the standard library.
- |-- |Cats strives to provide functional programming abstractions that are core,
- |-- |binary compatible, modular, approachable and efficient.
- |-- |A broader goal of Cats is to provide a foundation for an ecosystem of pure,
- |-- |typeful libraries to support functional programming in Scala applications.
- |-- dbName=pg_db
- |-- tableName=my_table
- |""".stripMargin
-
- val header = parse(text, keyValPairs(2)(_), verboseFailures = true)
- header match {
- case Parsed.Success(_, _) => ()
- case failure: Parsed.Failure => print(failure.trace())
- }
- header.isSuccess should be(true)
- val wf = header.get.value
- wf.size should be(3)
- wf.head._2 should be(
- """Cats is a library which provides abstractions for functional programming in the Scala programming language.
- |
- |Scala supports both object-oriented and functional programming,
- |and this is reflected in the hybrid approach of the standard library.
- |Cats strives to provide functional programming abstractions that are core,
- |binary compatible, modular, approachable and efficient.
- |A broader goal of Cats is to provide a foundation for an ecosystem of pure,
- |typeful libraries to support functional programming in Scala applications.""".stripMargin)
- }
-
- it("parse nested multi-line pair value in workflow") {
- val text =
- """
- |-- workflow=same_with_file_name
- |-- period=1440
- |-- loadType=incremental/full
- |-- logDrivenType=upstream/timewindow/diff/kafka/inc...
- |-- upstream=ods_product
- |
- |
- |-- step=generate variables(duplicate check)
- |-- source=postgres
- |-- connectionName=|
- |-- |Cats is a library which provides abstractions for functional programming in the Scala programming language.
- |-- |
- |-- |Scala supports both object-oriented and functional programming,
- |-- |and this is reflected in the hybrid approach of the standard library.
- |-- |Cats strives to provide functional programming abstractions that are core,
- |-- |binary compatible, modular, approachable and efficient.
- |-- |A broader goal of Cats is to provide a foundation for an ecosystem of pure,
- |-- |typeful libraries to support functional programming in Scala applications.
- |-- dbName=pg_db
- |-- tableName=my_table
- |-- options
- |-- es.mapping.parent=|
- |-- |[
- |-- |{"source_table": "a", "target_table": "a" },
- |-- |{"source_table": "b", "target_table": "b" }
- |-- |]
- |-- target=variables
- |select from_unixtime(unix_timestamp('2022-06-20 10:27:00', 'yyyy-MM-dd HH:mm:ss'), 'yyyy') as `YEAR`,
- | from_unixtime(unix_timestamp('2022-06-20 10:27:00', 'yyyy-MM-dd HH:mm:ss'), 'MM') as `MONTH`,
- | from_unixtime(unix_timestamp('2022-06-20 10:27:00', 'yyyy-MM-dd HH:mm:ss'), 'dd') as `DAY`,
- | from_unixtime(unix_timestamp('2022-06-20 10:27:00', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR`;
- |
- |""".stripMargin
-
- val header = parse(text, workflow(_), verboseFailures = true)
- header match {
- case Parsed.Success(_, _) => ()
- case failure: Parsed.Failure => print(failure.trace())
- }
- header.isSuccess should be(true)
- val wf = header.get.value
- val steps = wf.steps
-
- assert(steps.length == 1)
-
- val source1 = steps(0).source.asInstanceOf[DBDataSourceConfig]
- assert(source1.dataSourceType == "postgres")
- assert(source1.connectionName ==
- """Cats is a library which provides abstractions for functional programming in the Scala programming language.
- |
- |Scala supports both object-oriented and functional programming,
- |and this is reflected in the hybrid approach of the standard library.
- |Cats strives to provide functional programming abstractions that are core,
- |binary compatible, modular, approachable and efficient.
- |A broader goal of Cats is to provide a foundation for an ecosystem of pure,
- |typeful libraries to support functional programming in Scala applications.""".stripMargin)
- assert(source1.dbName == "pg_db")
- assert(source1.tableName == "my_table")
-
- source1.options("es.mapping.parent") should be(
- """[
- |{"source_table": "a", "target_table": "a" },
- |{"source_table": "b", "target_table": "b" }
- |]""".stripMargin)
- }
-
- it("parse multi-line pair values in nestedObj") {
- val text =
- """-- options
- |-- es.mapping.parent=|
- |-- |[
- |-- |{"source_table": "a", "target_table": "a" },
- |-- |{"source_table": "b", "target_table": "b" }
- |-- |]
- |-- args
- |-- a=b
- |""".stripMargin
-
- val header = parse(text, options(2)(_), verboseFailures = true)
- header match {
- case Parsed.Success(_, _) => ()
- case failure: Parsed.Failure => print(failure.trace())
- }
- header.isSuccess should be(true)
- val wf = header.get.value
- wf.head._2 should be(
- """[
- |{"source_table": "a", "target_table": "a" },
- |{"source_table": "b", "target_table": "b" }
- |]""".stripMargin)
- }
-
- it("parse logDrivenType") {
- val text =
- """-- step=6
- |-- source=transformation
- |-- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer
- |-- methodName=transform
- |-- businessCreateTime=order_create_time
- |-- businessUpdateTime=order_update_time
- |-- currentDb=postgres
- |-- currentDbType=postgres
- |-- currentTable=dwd.t_fact_order
- |-- currentTableColumnsAndType=???
- |-- primaryFields=order_sn
- |-- slowChanging=false
- |-- updateTable=ods_t_order__target_selected
- |-- updateType=full
- |-- transformerType=object
- |-- target=do_nothing""".stripMargin
-
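- // Round-trip check: rendering the parsed step must reproduce the input text (plus a trailing newline).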
- val stepList = parse(text, steps(_)).get.value.toList
-
- val step = stepList.last
-
- step.toString should be(text + "\n")
- }
-
- it("parse single key value pair") {
- val text = "-- connectionName=a"
-
- val header = parse(text, keyValPairs(2)(_), verboseFailures = true)
- header match {
- case Parsed.Success(_, _) => ()
- case failure: Parsed.Failure => print(failure.trace())
- }
- header.isSuccess should be(true)
- val wf = header.get.value
- wf.size should be(1)
- wf.head._2 should be("a")
- }
-
- it("parse to temp data source if source not specified") {
- val wf1 =
- """-- workflow=default_to_temp_source
- |-- period=1440
- |-- loadType=incremental
- |-- logDrivenType=timewindow
- |
- |-- step=1
- |-- target=temp
- |-- tableName=temp_table
- |select 'SUCCESS' as `RESULT`;
- |
- |-- step=2
- |-- target=console
- |select * from temp_table;""".stripMargin
-
-
- val wf2 =
- """-- workflow=default_to_temp_source
- |-- period=1440
- |-- loadType=incremental
- |-- logDrivenType=timewindow
- |
- |-- step=1
- |-- source=temp
- |-- target=temp
- |-- tableName=temp_table
- |select 'SUCCESS' as `RESULT`;
- |
- |-- step=2
- |-- source=temp
- |-- target=console
- |select * from temp_table;""".stripMargin
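-
- // Omitting `-- source=...` defaults a step's source to `temp`, so wf1 and wf2 must parse identically.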
-
- parseWorkflow(wf1).get.toString should be(parseWorkflow(wf2).get.toString)
- }
-
- it("parse loop over another table") {
- val wf =
- """-- workflow=default_to_temp_source
- |-- period=1440
- |-- loadType=incremental
- |-- logDrivenType=timewindow
- |
- |-- step=1
- |-- source=temp
- |-- target=temp
- |-- tableName=temp_table
- |select 'test_1' as `table_name`
- |union all
- |select 'test_2' as `table_name`
- |union all
- |select 'test_3' as `table_name`
- |union all
- |select 'test_4' as `table_name`
- |
- |-- step=2
- |-- source=http
- |-- url=http://localhost:1080/get_from_table/${table_name}
- |-- target=temp
- |-- tableName=target_temp_table
- |-- loopOver=temp_table""".stripMargin
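- // loopOver presumably re-runs step 2 once per row of `temp_table`, binding ${table_name}
- // from each row (an inference from the URL template; this test only asserts the parsed field).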
-
- parseWorkflow(wf).get.steps(1).loopOver should be("temp_table")
- }
-}
diff --git a/data-modeling/build.gradle b/data-modeling/build.gradle
deleted file mode 100644
index 6ce0c72..0000000
--- a/data-modeling/build.gradle
+++ /dev/null
@@ -1,38 +0,0 @@
-plugins {
- id "java-library"
- id "scala"
- id "com.github.alisiikh.scalastyle"
- id "com.github.maiflai.scalatest"
-}
-
-group = 'com.github.sharpdata.sharpetl'
-sourceCompatibility = 1.8
-version = '0.2.0'
-
-dependencies {
- implementation(project(":core"))
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- implementation "com.google.code.gson:gson:2.9.0"
- implementation group: 'org.apache.poi', name: 'poi', version: '4.1.0'
- implementation group: 'org.apache.poi', name: 'poi-ooxml', version: '4.1.0'
- implementation "com.google.guava:guava:30.0-jre"
- implementation 'info.picocli:picocli:4.6.3'
-
- testImplementation group: "org.scalatest", name: "scalatest_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalatest", name: "scalatest-flatspec_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalactic", name: "scalactic_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.junit.jupiter", name: "junit-jupiter-api", version: "5.6.2"
- testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:5.6.2"
- testRuntimeOnly "com.vladsch.flexmark:flexmark-all:0.62.2"
-}
-
-scalastyle {
- config = file("${rootDir}/scalastyle_config.xml") // path to scalastyle config xml file
- failOnWarning = true
- sourceSets {
- test {
- skip = true
- }
- }
-}
\ No newline at end of file
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/Exception.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/Exception.scala
deleted file mode 100644
index d86121d..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/Exception.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling
-
-object Exception {
- final case class TargetTableNotExistException(msg: String) extends RuntimeException(msg)
-
- final case class TooManyTargetTableException(msg: String) extends RuntimeException(msg)
-
- final case class UnsupportedPartitionPatternException(message: String) extends Exception(message)
-
- final case class TableConfigAndColumnConfigNoMatchException(msg: String) extends RuntimeException(msg)
-
- final case class TableConfigHasDuplicateSourceAndTargetTableException(msg: String) extends RuntimeException(msg)
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/cli/Command.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/cli/Command.scala
deleted file mode 100644
index 91ba0cf..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/cli/Command.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.cli
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.DwdTableParser
-import com.github.sharpdata.sharpetl.core.cli.{BatchJobCommand, CommonCommand}
-import com.github.sharpdata.sharpetl.core.util.IOUtil.getFullPath
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, IOUtil}
-import com.github.sharpdata.sharpetl.modeling.sql.gen.DwdWorkflowGen.genWorkflow
-import picocli.CommandLine
-
-import java.io.{BufferedWriter, File, FileWriter}
-
-@CommandLine.Command(name = "generate-ods-sql")
-class GenerateSqlFiles extends CommonCommand {
- @CommandLine.Option(
- names = Array("-f", "--file"),
- description = Array("Excel file path"),
- required = true
- )
- var filePath: String = _
-
- @CommandLine.Option(
- names = Array("-h", "--help"),
- usageHelp = true,
- description = Array("Sample parameters: -f=/path/to/config.xlsx")
- )
- var helpRequested = false
-
- @CommandLine.Option(
- names = Array("--output"),
- required = true,
- description = Array("Write to sql file path")
- )
- var output: String = _
-
- override def formatCommand(): Unit = {
- commandStr.append(s"--file=$filePath \t")
- commandStr.append(s"--output=$output \t")
- commandStr.append(s"--help=$helpRequested \t")
- super.formatCommand()
- }
-
- override def run(): Unit = {
- loggingJobParameters()
- import com.github.sharpdata.sharpetl.modeling.excel.parser.OdsTableParser
- import com.github.sharpdata.sharpetl.modeling.sql.gen.OdsWorkflowGen
- val odsModelings = OdsTableParser.readOdsConfig(filePath)
- odsModelings
- .foreach(modeling => {
- val workflowName = s"ods__${modeling.odsTableConfig.targetTable}"
- val workflow = OdsWorkflowGen.genWorkflow(modeling, workflowName)
- writeFile(workflowName, workflow.toString)
- })
- }
-
- def writeFile(filename: String, sqlContent: String): Unit = {
- val path = getFullPath(output)
- val file = new File(s"$path/$filename.sql")
- ETLLogger.info(s"Write sql file to $file")
- val sqlWriter = new BufferedWriter(new FileWriter(file))
- try sqlWriter.write(sqlContent)
- finally sqlWriter.close() // release the writer even if the write fails
- }
-}
-
-@CommandLine.Command(name = "generate-dwd-sql")
-class GenerateDwdStepCommand extends BatchJobCommand {
-
- @CommandLine.Option(names = Array("--output"), description = Array("Write to sql file path"))
- var outputPath: String = _
-
- override def formatCommand(): Unit = {
- commandStr.append(s"--output=$outputPath \t")
- super.formatCommand()
- }
-
-
- override def run(): Unit = {
- loggingJobParameters()
- val tables = DwdTableParser.readDwdConfig(excelOptions.filePath)
-
- val path = getFullPath(outputPath)
-
- tables
- .foreach(table => {
- val workflowName = s"${table.dwdTableConfig.sourceTable}_${table.dwdTableConfig.targetTable}"
- val workflow = genWorkflow(table, workflowName)
- val file = s"$path/$workflowName.sql"
- ETLLogger.info(s"Write sql file to $file")
- IOUtil.write(
- path = file,
- line = workflow.toString
- )
- })
- }
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/model/DwdTable.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/model/DwdTable.scala
deleted file mode 100644
index 25f2ec6..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/model/DwdTable.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.excel.model
-
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.modeling.sql.gen.DwdExtractSqlGen.ZIP_ID_FLAG
-
-object DwdTableConfigSheetHeader {
- val DWD_TABLE_CONFIG_SHEET_NAME = "dwd_etl_config"
-
- val SOURCE_CONNECTION = "source_connection"
- val SOURCE_TYPE = "source_type"
- val SOURCE_DB = "source_db"
- val SOURCE_TABLE = "source_table"
- val TARGET_CONNECTION = "target_connection"
- val TARGET_TYPE = "target_type"
- val TARGET_DB = "target_db"
- val TARGET_TABLE = "target_table"
- val FACT_OR_DIM = "fact_or_dim"
- val SLOW_CHANGING = "slow_changing"
- val ROW_FILTER_EXPRESSION = "row_filter_expression"
- val LOAD_TYPE = "load_type"
- val LOG_DRIVEN_TYPE = "log_driven_type"
- val UPSTREAM = "upstream"
- val DEPENDS_ON = "depends_on"
- val DEFAULT_START = "default_on"
-}
-
-object DwdModelingSheetHeader {
- val DWD_MODELING_SHEET_NAME = "dwd_config" // TODO: rename
-
- val SOURCE_TABLE = "source_table"
- val TARGET_TABLE = "target_table"
- val SOURCE_COLUMN = "source_column"
- val SOURCE_COLUMN_DESCRIPTION = "source_column_description"
- val TARGET_COLUMN = "target_column"
- val TARGET_COLUMN_TYPE = "target_column_type"
- val EXTRA_COLUMN_EXPRESSION = "extra_column_expression"
- val PARTITION_COLUMN = "partition_column"
- val LOGIC_PRIMARY_COLUMN = "logic_primary_column"
- val JOIN_DB_CONNECTION = "join_db_connection"
- val JOIN_DB_TYPE = "join_db_type"
- val JOIN_DB = "join_db"
- val JOIN_TABLE = "join_table"
- val JOIN_ON = "join_on"
- val CREATE_DIM_MODE = "create_dim_mode"
- val JOIN_TABLE_COLUMN = "join_table_column"
- val BUSINESS_CREATE_TIME = "business_create_time"
- val BUSINESS_UPDATE_TIME = "business_update_time"
- val IGNORE_CHANGING_COLUMN = "ignore_changing_column"
- val QUALITY_CHECK_RULES = "quality_check_rules"
-}
-
-object FactOrDim {
- val FACT = "fact"
- val DIM = "dim"
-}
-
-object CreateDimMode {
- val NEVER = "never"
- val ONCE = "once"
- val ALWAYS = "always"
-}
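-// (Inferred from AutoCreateDimSqlGen below: `once` deduplicates and inserts only 'new' dim rows,
-// `always` also re-creates 'updated' rows, and `never` opts the column out of auto-creation.)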
-
-/**
- * Granularity: sourceTable + targetTable
- * TODO: quality checks are not covered yet
- */
-final case class DwdTableConfig(sourceConnection: String,
- sourceType: String,
- sourceDb: String,
- sourceTable: String,
- targetConnection: String,
- targetType: String,
- targetDb: String,
- targetTable: String,
- factOrDim: String,
- slowChanging: Boolean,
- rowFilterExpression: String,
- loadType: String,
- logDrivenType: String,
- upstream: String,
- dependsOn: String,
- defaultStart: String
- )
-
-
-final case class DwdModelingColumn(sourceTable: String,
- targetTable: String,
- sourceColumn: String,
- sourceColumnDescription: String,
- targetColumn: String,
- targetColumnType: String,
- extraColumnExpression: String,
- partitionColumn: Boolean,
- logicPrimaryColumn: Boolean,
- joinDbConnection: String,
- joinDbType: String,
- joinDb: String,
- joinTable: String,
- joinOn: String,
- createDimMode: String,
- joinTableColumn: String,
- businessCreateTime: Boolean,
- businessUpdateTime: Boolean,
- ignoreChangingColumn: Boolean,
- qualityCheckRules: String) {
- lazy val joinTempFieldPrefix = s"${joinDb}_${joinTable}____"
-}
-
-
-final case class DwdModeling(dwdTableConfig: DwdTableConfig, columns: Seq[DwdModelingColumn])
-
-final case class DimTable(dimTable: String, cols: Seq[DwdModelingColumn], partitionCols: Seq[DwdModelingColumn],
- updateTimeCols: Seq[DwdModelingColumn], createTimeCols: Seq[DwdModelingColumn]) {
- val noneAutoCreateDimIdColumns: Seq[DwdModelingColumn] = cols.filterNot(_.extraColumnExpression == ZIP_ID_FLAG)
-
- val joinOnColumns: Seq[DwdModelingColumn] = cols.filterNot(it => isNullOrEmpty(it.joinOn))
-
- val autoCreateColumns: Seq[DwdModelingColumn] = cols.filter(_.extraColumnExpression == ZIP_ID_FLAG)
-
- val additionalCols: Seq[DwdModelingColumn] = partitionCols ++ updateTimeCols ++ createTimeCols
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/model/OdsTable.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/model/OdsTable.scala
deleted file mode 100644
index 397d6b9..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/model/OdsTable.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.excel.model
-
-object OdsTableConfigSheetHeader {
- val ODS_TABLE_CONFIG_SHEET_NAME = "ods_etl_config"
-
- val SOURCE_CONNECTION = "source_connection"
- val SOURCE_TABLE = "source_table"
- val SOURCE_DB = "source_db"
- val SOURCE_TYPE = "source_type"
-
- val TARGET_CONNECTION = "target_connection"
- val TARGET_TABLE = "target_table"
- val TARGET_DB = "target_db"
- val TARGET_TYPE = "target_type"
-
- val FILTER_EXPR = "row_filter_expression"
-
- val UPDATE_TYPE = "update_type"
- val PARTITION_FORMAT = "partition_format"
- val TIME_FORMAT = "time_format"
- val PERIOD = "period"
-}
-
-object OdsModelingSheetHeader {
- val ODS_MODELING_SHEET_NAME = "ods_config"
-
- val SOURCE_COLUMN = "source_column"
- val COLUMN_TYPE = "column_type"
- val INCREMENTAL_COLUMN = "incremental_column"
- val PRIMARY_COLUMN = "is_PK"
-
- val TARGET_COLUMN = "target_column"
-
- val EXTRA_COLUMN_EXPRESSION = "extra_column_expression"
-}
-
-object OdsTable {
- final case class OdsTableConfig(sourceConnection: String,
- sourceTable: String,
- sourceDb: String,
- sourceType: String,
- targetConnection: String,
- targetTable: String,
- targetDb: String,
- targetType: String,
- filterExpression: String,
- loadType: String,
- logDrivenType: String,
- upstream: String,
- dependsOn: String,
- defaultStart: String,
- partitionFormat: String,
- timeFormat: String,
- period: String
- )
-
-
- final case class OdsModelingColumn(sourceTable: String,
- targetTable: String,
- sourceColumn: String,
- targetColumn: String,
- extraColumnExpression: String,
- incrementalColumn: Boolean,
- primaryKeyColumn: Boolean)
-
-
- final case class OdsModeling(odsTableConfig: OdsTableConfig, columns: Seq[OdsModelingColumn])
-
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/DwdTableParser.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/DwdTableParser.scala
deleted file mode 100644
index 180e8b9..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/DwdTableParser.scala
+++ /dev/null
@@ -1,109 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.excel.parser
-
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.core.util.ExcelUtil._
-import com.github.sharpdata.sharpetl.modeling.Exception.TableConfigHasDuplicateSourceAndTargetTableException
-import com.github.sharpdata.sharpetl.modeling.excel.model.DwdModelingSheetHeader._
-import com.github.sharpdata.sharpetl.modeling.excel.model.DwdTableConfigSheetHeader._
-import com.github.sharpdata.sharpetl.modeling.excel.model._
-import org.apache.poi.ss.usermodel.Row
-
-/**
- * The dwd_etl_config sheet may hold multiple tables; the (source_table, target_table) pair must be unique across rows.
- * The dwd_config sheet may hold ETL configs for multiple tables.
- * The dwd_etl_config and dwd_config sheets are joined on source_table and target_table.
- */
-object DwdTableParser {
- def readDwdConfig(filePath: String): Seq[DwdModeling] = {
-
- val dwdModelingSheet = {
- val modelingSheet = readSheet(filePath, DWD_MODELING_SHEET_NAME)
- implicit val headers: Map[String, Int] = readHeaders(modelingSheet.head)
- modelingSheet
- .tail
- .map(rowToDwdColumnEtl)
- }
-
- def readDwdModelingSheet(sourceTable: String, targetTable: String): Seq[DwdModelingColumn] = {
- dwdModelingSheet.filter(column => column.sourceTable == sourceTable && column.targetTable == targetTable)
- }
-
- def readDwdTableConfigSheet: Seq[DwdTableConfig] = {
- val tableConfigSheet = readSheet(filePath, DWD_TABLE_CONFIG_SHEET_NAME)
- implicit val headers: Map[String, Int] = readHeaders(tableConfigSheet.head)
- val tableConfigs = tableConfigSheet
- .tail
- .map(rowToDwdTableEtl)
- tableConfigs
- .groupBy(table => (table.sourceTable, table.targetTable))
- .foreach { // just for check duplicated table config
- case ((_, _), tables) =>
- if (tables.size > 1) {
- val errorMsg: String = s"In $DWD_TABLE_CONFIG_SHEET_NAME sheet, " +
- s"the ${DwdModelingSheetHeader.SOURCE_TABLE} and ${DwdModelingSheetHeader.TARGET_TABLE} config is duplicated."
- ETLLogger.error(
- s"""
- |error message:
- |$errorMsg
- |""".stripMargin)
- throw TableConfigHasDuplicateSourceAndTargetTableException(errorMsg)
- }
- }
- tableConfigs
- }
-
- readDwdTableConfigSheet
- .map {
- it => DwdModeling(it, readDwdModelingSheet(it.sourceTable, it.targetTable))
- }
- }
-
- private def rowToDwdTableEtl(implicit headers: Map[String, Int]): Row => DwdTableConfig = {
- row =>
- DwdTableConfig(
- sourceConnection = getStringCellOrNull(SOURCE_CONNECTION, row),
- sourceType = getStringCellOrNull(SOURCE_TYPE, row),
- sourceDb = getStringCellOrNull(DwdTableConfigSheetHeader.SOURCE_DB, row),
- sourceTable = getStringCellOrNull(DwdTableConfigSheetHeader.SOURCE_TABLE, row),
- targetConnection = getStringCellOrNull(TARGET_CONNECTION, row),
- targetType = getStringCellOrNull(TARGET_TYPE, row),
- targetDb = getStringCellOrNull(DwdTableConfigSheetHeader.TARGET_DB, row),
- targetTable = getStringCellOrNull(DwdTableConfigSheetHeader.TARGET_TABLE, row),
- factOrDim = getStringCellOrNull(FACT_OR_DIM, row),
- slowChanging = getBoolCell(SLOW_CHANGING, row),
- rowFilterExpression = getStringCellOrNull(ROW_FILTER_EXPRESSION, row),
- loadType = getStringCellOrNull(LOAD_TYPE, row),
- logDrivenType = getStringCellOrNull(LOG_DRIVEN_TYPE, row),
- upstream = getStringCellOrNull(UPSTREAM, row),
- dependsOn = getStringCellOrNull(DEPENDS_ON, row),
- defaultStart = getStringCellOrNull(DEFAULT_START, row)
- )
- }
-
- private def rowToDwdColumnEtl(implicit headers: Map[String, Int]): Row => DwdModelingColumn = {
- row =>
- DwdModelingColumn(
- sourceTable = getStringCellOrNull(DwdModelingSheetHeader.SOURCE_TABLE, row),
- targetTable = getStringCellOrNull(DwdModelingSheetHeader.TARGET_TABLE, row),
- sourceColumn = getStringCellOrNull(SOURCE_COLUMN, row),
- sourceColumnDescription = getStringCellOrNull(SOURCE_COLUMN_DESCRIPTION, row),
- targetColumn = getStringCellOrNull(TARGET_COLUMN, row),
- targetColumnType = getStringCellOrNull(TARGET_COLUMN_TYPE, row),
- extraColumnExpression = getStringCellOrNull(EXTRA_COLUMN_EXPRESSION, row),
- partitionColumn = getBoolCell(PARTITION_COLUMN, row),
- logicPrimaryColumn = getBoolCell(LOGIC_PRIMARY_COLUMN, row),
- joinDbConnection = getStringCellOrNull(JOIN_DB_CONNECTION, row),
- joinDbType = getStringCellOrNull(JOIN_DB_TYPE, row),
- joinDb = getStringCellOrNull(JOIN_DB, row),
- joinTable = getStringCellOrNull(JOIN_TABLE, row),
- joinOn = getStringCellOrNull(JOIN_ON, row),
- createDimMode = getStringCellOrNull(CREATE_DIM_MODE, row),
- joinTableColumn = getStringCellOrNull(JOIN_TABLE_COLUMN, row),
- businessCreateTime = getBoolCell(BUSINESS_CREATE_TIME, row),
- businessUpdateTime = getBoolCell(BUSINESS_UPDATE_TIME, row),
- ignoreChangingColumn = getBoolCell(IGNORE_CHANGING_COLUMN, row),
- qualityCheckRules = getStringCellOrNull(QUALITY_CHECK_RULES, row)
- )
- }
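-
- // Usage sketch (hypothetical file path, for illustration only):
- //   val modelings = DwdTableParser.readDwdConfig("/path/to/modeling.xlsx")
- //   modelings.foreach(m => println(m.dwdTableConfig.targetTable))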
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/OdsTableParser.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/OdsTableParser.scala
deleted file mode 100644
index 69a79c3..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/OdsTableParser.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.excel.parser
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.OdsTableConfigSheetHeader
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.core.util.ExcelUtil._
-import com.github.sharpdata.sharpetl.modeling.Exception.TableConfigHasDuplicateSourceAndTargetTableException
-import com.github.sharpdata.sharpetl.modeling.excel.model.DwdTableConfigSheetHeader.{DEFAULT_START, DEPENDS_ON, LOAD_TYPE, LOG_DRIVEN_TYPE, UPSTREAM}
-import com.github.sharpdata.sharpetl.modeling.excel.model.OdsModelingSheetHeader._
-import com.github.sharpdata.sharpetl.modeling.excel.model.OdsTable._
-import com.github.sharpdata.sharpetl.modeling.excel.model.OdsTableConfigSheetHeader._
-import org.apache.poi.ss.usermodel.Row
-
-object OdsTableParser {
- def readOdsConfig(filePath: String): Seq[OdsModeling] = {
-
- val odsModelingSheet = {
- val modelingSheet = readSheet(filePath, ODS_MODELING_SHEET_NAME)
- implicit val headers: Map[String, Int] = readHeaders(modelingSheet.head)
- modelingSheet
- .tail
- .map(rowToOdsColumnEtl)
- }
-
- def readOdsModelingSheet(sourceTable: String, targetTable: String): Seq[OdsModelingColumn] = {
- odsModelingSheet.filter(column => column.sourceTable == sourceTable && column.targetTable == targetTable)
- }
-
- def readOdsTableConfigSheet: Seq[OdsTableConfig] = {
- val tableConfigSheet = readSheet(filePath, ODS_TABLE_CONFIG_SHEET_NAME)
- implicit val headers: Map[String, Int] = readHeaders(tableConfigSheet.head)
- val tableConfigs = tableConfigSheet
- .tail
- .map(rowToOdsTableEtl)
- tableConfigs
- .groupBy(table => (table.sourceTable, table.targetTable))
- .foreach { // just for check duplicated table config
- case ((_, _), tables) =>
- if (tables.size > 1) {
- val errorMsg: String = s"In $ODS_TABLE_CONFIG_SHEET_NAME sheet, " +
- s"the ${SOURCE_TABLE} and ${TARGET_TABLE} config is duplicated."
- ETLLogger.error(
- s"""
- |error message:
- |$errorMsg
- |""".stripMargin)
- throw TableConfigHasDuplicateSourceAndTargetTableException(errorMsg)
- }
- }
- tableConfigs
- }
-
- readOdsTableConfigSheet
- .map {
- it => OdsModeling(it, readOdsModelingSheet(it.sourceTable, it.targetTable))
- }
- }
-
- private def rowToOdsTableEtl(implicit headers: Map[String, Int]): Row => OdsTableConfig = {
- row =>
- OdsTableConfig(
- sourceConnection = getStringCellOrNull(SOURCE_CONNECTION, row),
- sourceType = getStringCellOrNull(SOURCE_TYPE, row),
- sourceDb = getStringCellOrNull(OdsTableConfigSheetHeader.SOURCE_DB, row),
- sourceTable = getStringCellOrNull(OdsTableConfigSheetHeader.SOURCE_TABLE, row),
- targetConnection = getStringCellOrNull(TARGET_CONNECTION, row),
- targetType = getStringCellOrNull(TARGET_TYPE, row),
- targetDb = getStringCellOrNull(OdsTableConfigSheetHeader.TARGET_DB, row),
- targetTable = getStringCellOrNull(OdsTableConfigSheetHeader.TARGET_TABLE, row),
- filterExpression = getStringCellOrNull(FILTER_EXPR, row),
- loadType = getStringCellOrNull(LOAD_TYPE, row),
- logDrivenType = getStringCellOrNull(LOG_DRIVEN_TYPE, row),
- upstream = getStringCellOrNull(UPSTREAM, row),
- dependsOn = getStringCellOrNull(DEPENDS_ON, row),
- defaultStart = getStringCellOrNull(DEFAULT_START, row),
- partitionFormat = getStringCellOrNull(PARTITION_FORMAT, row),
- timeFormat = getStringCellOrDefault(TIME_FORMAT, row, "YYYY-MM-DD hh:mm:ss"),
- period = getNumericCell(PERIOD, row).toInt.toString
- )
- }
-
- private def rowToOdsColumnEtl(implicit headers: Map[String, Int]): Row => OdsModelingColumn = {
- row =>
- OdsModelingColumn(
- sourceTable = getStringCellOrNull(SOURCE_TABLE, row),
- targetTable = getStringCellOrNull(TARGET_TABLE, row),
- sourceColumn = getStringCellOrNull(SOURCE_COLUMN, row),
- targetColumn = getStringCellOrNull(TARGET_COLUMN, row),
- extraColumnExpression = getStringCellOrNull(EXTRA_COLUMN_EXPRESSION, row),
- incrementalColumn = getBoolCell(INCREMENTAL_COLUMN, row),
- primaryKeyColumn = getBoolCell(PRIMARY_COLUMN, row)
- )
- }
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/dialect/SqlDialect.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/dialect/SqlDialect.scala
deleted file mode 100644
index 1092486..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/dialect/SqlDialect.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.dialect
-
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.{HIVE, INFORMIX, MS_SQL_SERVER, MYSQL, POSTGRES, SPARK_SQL}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.uuid
-
-// scalastyle:off
-
-/**
- * [[org.apache.spark.sql.jdbc.JdbcDialect]]
- */
-sealed trait SqlDialect {
- def quoteIdentifier(colName: String): String
-
- def year(colName: String, timeFormat: String): String
-
- def month(colName: String, timeFormat: String): String
-
- def day(colName: String, timeFormat: String): String
-
- def hour(colName: String, timeFormat: String): String
-
- def minute(colName: String, timeFormat: String): String
-}
-
-case object HiveDialect extends SqlDialect {
- override def quoteIdentifier(colName: String): String = s"`$colName`"
-
- override def year(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName, '$timeFormat'), 'yyyy')"
-
- override def month(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName, '$timeFormat'), 'MM')"
-
- override def day(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName, '$timeFormat'), 'dd')"
-
- override def hour(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName, '$timeFormat'), 'HH')"
-
- override def minute(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName, '$timeFormat'), 'mm')"
-}
-
-case object MysqlDialect extends SqlDialect {
- override def quoteIdentifier(colName: String): String = s"`$colName`"
-
- override def year(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName), '%Y')"
-
- override def month(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName), '%m')"
-
- override def day(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName), '%d')"
-
- override def hour(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName), '%H')"
-
- override def minute(colName: String, timeFormat: String): String = s"from_unixtime(unix_timestamp($colName), '%i')"
-}
-
-case object PostgresDialect extends SqlDialect {
- override def quoteIdentifier(colName: String): String = {
- if (colName.contains(".")) {
- colName.split('.').map(col => s""""$col"""").mkString(".")
- } else {
- s""""$colName""""
- }
- }
-
- override def year(colName: String, timeFormat: String): String = s"""to_char(${quoteIdentifier(colName)}, 'yyyy')"""
-
- override def month(colName: String, timeFormat: String): String = s"""to_char(${quoteIdentifier(colName)}, 'MM')"""
-
- override def day(colName: String, timeFormat: String): String = s"""to_char(${quoteIdentifier(colName)}, 'DD')"""
-
- override def hour(colName: String, timeFormat: String): String = s"""to_char(${quoteIdentifier(colName)}, 'HH24')"""
-
- override def minute(colName: String, timeFormat: String): String = s"""to_char(${quoteIdentifier(colName)}, 'MI')"""
-}
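-
-// For example (derived from the definitions above):
-//   PostgresDialect.quoteIdentifier("a.b")  == "\"a\".\"b\""
-//   PostgresDialect.hour("ts", "")          == "to_char(\"ts\", 'HH24')"
-//   HiveDialect.year("ts", "yyyy-MM-dd")    == "from_unixtime(unix_timestamp(ts, 'yyyy-MM-dd'), 'yyyy')"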
-
-case object MSSqlDialect extends SqlDialect {
- override def quoteIdentifier(colName: String): String = s"[$colName]"
-
- override def year(colName: String, timeFormat: String): String = ???
-
- override def month(colName: String, timeFormat: String): String = ???
-
- override def day(colName: String, timeFormat: String): String = ???
-
- override def hour(colName: String, timeFormat: String): String = ???
-
- override def minute(colName: String, timeFormat: String): String = ???
-}
-
-case object InformixSqlDialect extends SqlDialect {
- override def quoteIdentifier(colName: String): String = colName
-
- override def year(colName: String, timeFormat: String): String = ???
-
- override def month(colName: String, timeFormat: String): String = ???
-
- override def day(colName: String, timeFormat: String): String = ???
-
- override def hour(colName: String, timeFormat: String): String = ???
-
- override def minute(colName: String, timeFormat: String): String = ???
-}
-
-
-object SqlDialect {
- def quote(name: String, `type`: String): String = quoteIdentifier(name, `type`)
-
- def quoteIdentifier(colName: String, `type`: String): String = {
- `type` match {
- case HIVE | SPARK_SQL => HiveDialect.quoteIdentifier(colName)
- case MYSQL => MysqlDialect.quoteIdentifier(colName)
- case POSTGRES => PostgresDialect.quoteIdentifier(colName)
- case MS_SQL_SERVER => MSSqlDialect.quoteIdentifier(colName)
- case INFORMIX => InformixSqlDialect.quoteIdentifier(colName)
- }
- }
-
- def surrogateKey(`type`: String): String = {
- `type` match {
- case POSTGRES => "uuid_generate_v1()"
- case _ => "uuid()"
- }
- }
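-
- // e.g. surrogateKey(POSTGRES) == "uuid_generate_v1()"; every other type falls back to "uuid()".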
-
- def getSqlDialect(`type`: String): SqlDialect = {
- `type` match {
- case HIVE | SPARK_SQL => HiveDialect
- case MYSQL => MysqlDialect
- case POSTGRES => PostgresDialect
- case MS_SQL_SERVER => MSSqlDialect
- case INFORMIX => InformixSqlDialect
- }
- }
-}
-
-// scalastyle:on
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimSqlGen.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimSqlGen.scala
deleted file mode 100644
index c5746b3..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimSqlGen.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.{CreateDimMode, DimTable, DwdModelingColumn}
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.SPARK_SQL
-import com.github.sharpdata.sharpetl.core.util.Constants.{IncrementalType, DataSourceType, WriteMode}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.getTempName
-import com.github.sharpdata.sharpetl.modeling.sql.dialect.SqlDialect.quote
-
-// scalastyle:off
-object AutoCreateDimSqlGen {
- val DISTINCT_COUNT_NUM = "distinct_count_num"
-
- def parseDimensionSql(sourceTableName: String,
- dimTable: DimTable): String = {
- lazy val joinOnCols = dimTable.joinOnColumns.map(column => s"`${column.joinTempFieldPrefix}${joinColOrSourceCol(column)}`")
- lazy val updateTimeCols = dimTable.updateTimeCols.map(column => s"`${joinColOrSourceCol(column)}`")
- lazy val partitionByClause = s",\n row_number() OVER (PARTITION BY ${joinOnCols.mkString(",")} ORDER BY ${updateTimeCols.mkString(",")} DESC) as row_number"
-
- val autoCreateColumnsSelectExpr = dimTable
- .autoCreateColumns
- .map(column => s"uuid() as `${column.joinTableColumn}`")
- val otherColumnsSelectExpr = dimTable.noneAutoCreateDimIdColumns
- .map(column =>
- s"`${column.joinTempFieldPrefix}${joinColOrSourceCol(column)}` as ${quote(joinColOrSourceCol(column), SPARK_SQL)}")
- val additionalColsSelectExpr = dimTable.additionalCols
- .map(column => s"`${column.sourceColumn}`")
- val selectClause = (autoCreateColumnsSelectExpr ++ (otherColumnsSelectExpr :+ s"'1' as ${quote("is_auto_created", SPARK_SQL)}") ++ additionalColsSelectExpr).mkString(",\n ")
- val partitionedSelectionExpr =
- (((dimTable.cols ++ dimTable.updateTimeCols ++ dimTable.createTimeCols)
- .map(col => quote(joinColOrSourceCol(col), SPARK_SQL)) :+ quote("is_auto_created", SPARK_SQL))
- ++ dimTable.partitionCols.map(col => quote(joinColOrSourceCol(col), SPARK_SQL))).mkString(",")
-
- val whereNotNullClause = dimTable
- .joinOnColumns
- .map(column => s"`${column.joinTempFieldPrefix}${joinColOrSourceCol(column)}` is not null").mkString("\n and")
-
- val once = dimTable.cols.exists(_.createDimMode == CreateDimMode.ONCE)
-
- if (once) {
- s"""
- |select $partitionedSelectionExpr from (
- | select $selectClause$partitionByClause
- | from $sourceTableName $sourceTableName
- | where ($whereNotNullClause)
- | and (`auto_created_${dimTable.dimTable}_status` = 'new')
- |) where row_number = 1
- |""".stripMargin
- } else {
- s"""
- |select $selectClause
- |from $sourceTableName $sourceTableName
- |where ($whereNotNullClause)
- | and (
- | `auto_created_${dimTable.dimTable}_status` = 'new'
- | or `auto_created_${dimTable.dimTable}_status` = 'updated')
- |""".stripMargin
- }
- }
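-
- // Shape of the generated SQL (sketch, derived from the templates above) when createDimMode is `once`:
- //   select <cols> from (
- //     select <cols>, row_number() over (partition by <joinOnCols> order by <updateTimeCols> desc) as row_number
- //     from <source> where <joinOn cols are not null> and `auto_created_<dim>_status` = 'new'
- //   ) where row_number = 1
- // Otherwise the inner select is emitted directly and also accepts 'updated' rows.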
-
- def tmpStepToTempStep(steps: List[WorkflowStep], index: Int, sourceTempTable: String, func: String, sql: String): List[WorkflowStep] = {
- val step = new WorkflowStep
- step.setStep(index.toString)
-
- val sourceConfig = new DBDataSourceConfig
- sourceConfig.setDataSourceType(DataSourceType.TEMP)
- sourceConfig.setTableName(sourceTempTable)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.TEMP)
- targetConfig.setTableName(getTempName(sourceTempTable, func))
- step.setTargetConfig(targetConfig)
- step.setWriteMode(WriteMode.OVER_WRITE)
- step.setSqlTemplate(sql)
-
- steps :+ step
- }
-
- def parsePartitionClause(dimTables: Seq[DimTable], sourceTableName: String): String = {
- val cols = dimTables.flatMap(_.additionalCols).map(col => s"`$sourceTableName`.`${col.sourceColumn}` as `${col.sourceColumn}`").distinct
- if (cols.isEmpty) {
- ""
- } else {
- s",\n ${cols.mkString(",\n ")}"
- }
- }
-
- def genGroupedDimensionSql(dimTables: Seq[DimTable], tempTableName: String, sourceTableName: String): String = {
- val joinFactTempSelectClause = parseJoinFactTempSelectClause(dimTables)
- val joinFactTempJoinClause = parseJoinFactTempJoinClause(dimTables)
- val joinStatus = parseJoinStatusClause(dimTables)
- val partitionCols = parsePartitionClause(dimTables, sourceTableName)
-
- s"""
- |select $joinFactTempSelectClause,
- | $joinStatus$partitionCols
- |from $tempTableName `$sourceTableName`
- | $joinFactTempJoinClause
- |""".stripMargin
- }
-
- def parseJoinFactTempSelectClause(dimTables: Seq[DimTable]): String =
- dimTables
- .flatMap(_.noneAutoCreateDimIdColumns)
- .sortBy(_.joinTable)
- .map(column =>
- s"${sourceColOrExpr(column)} as `${column.joinTempFieldPrefix}${joinColOrSourceCol(column)}`"
- )
- .mkString(",\n ")
-
- def parseJoinFactTempJoinClause(dimTables: Seq[DimTable]): String =
- dimTables
- .sortBy(_.dimTable)
- .map(dimTable => {
- val joinOnClause = dimTable
- .joinOnColumns
- .map(column => s"${sourceColOrExpr(column)} = `${dimTable.dimTable}`.`${joinColOrSourceCol(column)}`")
- .mkString("\n and ")
- s"""| left join `${dimTable.cols.head.joinDb}`.`${dimTable.dimTable}` `${dimTable.dimTable}` -- TODO: year/month/day
- | on $joinOnClause
- |""".stripMargin
- })
- .mkString("")
- .trim
-
- def parseJoinStatusClause(dimTables: Seq[DimTable]): String = {
- def caseClause(dimTable: DimTable): String = {
- val newClause = s"""${dimTable.joinOnColumns.map(col => s"`${dimTable.dimTable}`.`${col.joinOn}` is null").mkString(" or ")}"""
-
- val updatedClause = dimTable
- .noneAutoCreateDimIdColumns
- .map(column =>
- s"${sourceColOrExpr(column)} != `${dimTable.dimTable}`.`${joinColOrSourceCol(column)}`"
- )
- .mkString(" or\n ")
-
- s"""case
- | when (
- | $newClause
- | ) then 'new'
- | when (
- | $updatedClause
- | ) then 'updated'
- | else 'nochange'
- | end as `auto_created_${dimTable.dimTable}_status`""".stripMargin
- }
-
-
- dimTables
- .sortBy(_.dimTable)
- .map(caseClause)
- .mkString(",\n ")
- }
-
- def joinColOrSourceCol(column: DwdModelingColumn): String = {
- if (isNullOrEmpty(column.joinTableColumn)) column.sourceColumn else column.joinTableColumn
- }
-
- def targetColOrSourceCol(column: DwdModelingColumn): String = {
- if (isNullOrEmpty(column.targetColumn)) column.sourceColumn else column.targetColumn
- }
-
- def sourceColOrExpr(column: DwdModelingColumn): String = {
- if (isNullOrEmpty(column.sourceColumn)) {
- if (column.extraColumnExpression.contains(" ")) {
- column.extraColumnExpression
- } else {
- s"`${column.sourceTable}`.`${column.extraColumnExpression}`"
- }
- } else {
- s"`${column.sourceTable}`.`${column.sourceColumn}`"
- }
- }
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimSqlGen2.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimSqlGen2.scala
deleted file mode 100644
index 84210b8..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimSqlGen2.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.{CreateDimMode, DwdModeling}
-import com.google.gson.JsonObject
-import com.github.sharpdata.sharpetl.core.datasource.config.{DBDataSourceConfig, TransformationDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.{IncrementalType, DataSourceType}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{canNotBeEmpty, getTempName, isNullOrEmpty}
-import DwdExtractSqlGen.getTargetColumn
-
-import scala.collection.mutable
-
-// scalastyle:off
-object AutoCreateDimSqlGen2 {
- def genAutoCreateDimStep(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int): (List[WorkflowStep], Int) = {
- var index = stepIndex - 1
- val stepsTemp: List[WorkflowStep] = dwdModding.columns
- .filterNot(column => isNullOrEmpty(column.joinTable))
- .filter(column => CreateDimMode.ALWAYS.equals(column.createDimMode) || CreateDimMode.ONCE.equals(column.createDimMode))
- .groupBy(column => (column.joinDbType, column.joinDb, column.joinTable))
- .toList
- .sortBy(_._1._3)
- .map {
- case ((joinDbType, joinDb, joinTable), dimColumns) => {
- val createDimMode = dimColumns.head.createDimMode
- val updateTable = getTempName(dwdModding.dwdTableConfig.sourceTable, "extracted")
- val dimDbType = joinDbType
- val dimDb = joinDb
- val dimTable = joinTable
- val currentBusinessCreateTime = getTargetColumn(dwdModding.columns.filter(_.businessCreateTime).head)
-
- val dimTableColumnsAndType = new JsonObject()
- val currentAndDimColumnsMapping = new JsonObject()
- val currentAndDimPrimaryMapping = new JsonObject()
- // TODO 缺少维度表的创建时间配置,此处手动添加
- dimTableColumnsAndType.addProperty("create_time", "timestamp")
- currentAndDimColumnsMapping.addProperty("order_create_time", "create_time")
- dimColumns
- .filterNot(column => isNullOrEmpty(column.joinTableColumn))
- .filter(column => isNullOrEmpty(column.extraColumnExpression))
- .foreach {
- column => {
- dimTableColumnsAndType.addProperty(column.joinTableColumn, column.targetColumnType)
- currentAndDimColumnsMapping.addProperty(getTargetColumn(column), column.joinTableColumn)
- if (!isNullOrEmpty(column.joinOn)) {
- currentAndDimPrimaryMapping.addProperty(getTargetColumn(column), column.joinTableColumn)
- }
- }
- }
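-
- // At this point (illustration): dimTableColumnsAndType maps dim column -> type,
- // currentAndDimColumnsMapping maps current (fact) column -> dim column, and
- // currentAndDimPrimaryMapping keeps only the join-key columns.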
-
- val step = new WorkflowStep
- index = index + 1
- step.setStep(index.toString)
-
- val sourceConfig = new TransformationDataSourceConfig
- sourceConfig.setDataSourceType(DataSourceType.TRANSFORMATION)
- sourceConfig.className = "com.github.sharpdata.sharpetl.spark.transformation.JdbcAutoCreateDimTransformer"
- sourceConfig.methodName = "transform"
-
- sourceConfig.args = Map(
- canNotBeEmpty("updateTable", updateTable),
- canNotBeEmpty("createDimMode", createDimMode),
- canNotBeEmpty("dimDbType", dimDbType),
- canNotBeEmpty("dimDb", dimDb),
- canNotBeEmpty("dimTable", dimTable),
- canNotBeEmpty("currentBusinessCreateTime", currentBusinessCreateTime),
- canNotBeEmpty("dimTableColumnsAndType", dimTableColumnsAndType.toString()),
- canNotBeEmpty("currentAndDimColumnsMapping", currentAndDimColumnsMapping.toString()),
- canNotBeEmpty("currentAndDimPrimaryMapping", currentAndDimPrimaryMapping.toString())
- )
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.DO_NOTHING)
- step.setTargetConfig(targetConfig)
-
- step
- }
- }
- (steps ++ stepsTemp, index + 1)
- }
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdExtractSqlGen.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdExtractSqlGen.scala
deleted file mode 100644
index 6072a0c..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdExtractSqlGen.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.{DwdModeling, DwdModelingColumn}
-import com.github.sharpdata.sharpetl.modeling.sql.dialect.SqlDialect
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.Constants.{DataSourceType, IncrementalType, WriteMode}
-import com.github.sharpdata.sharpetl.core.util.ETLConfig.jobIdColumn
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{getTempName, isNullOrEmpty}
-import SqlDialect.quote
-
-import scala.collection.mutable.ArrayBuffer
-
-object DwdExtractSqlGen {
- val ZIP_ID_FLAG = "zip_id_flag"
-
- def genExtractStep(dwdModding: DwdModeling, stepIndex: Int): List[WorkflowStep] = {
- val sourceType: String = dwdModding.dwdTableConfig.sourceType
- val rowFilterExpression =
-   if (isNullOrEmpty(dwdModding.dwdTableConfig.rowFilterExpression)) ""
-   else "and " + dwdModding.dwdTableConfig.rowFilterExpression
- val steps = ArrayBuffer[WorkflowStep]()
-
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new DBDataSourceConfig
- sourceConfig.setDataSourceType(dwdModding.dwdTableConfig.sourceType)
- sourceConfig.setDbName(dwdModding.dwdTableConfig.sourceDb)
- sourceConfig.setTableName(dwdModding.dwdTableConfig.sourceTable)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.TEMP)
- targetConfig.setTableName(getTempName(sourceConfig.tableName, "extracted"))
- step.setTargetConfig(targetConfig)
-
- step.setWriteMode(WriteMode.OVER_WRITE)
-
- // TODO: extract an abstraction here; different engines need different implementations.
- val selectColumn = dwdModding.columns
- .map {
- column => {
- if (ZIP_ID_FLAG.equals(column.extraColumnExpression)) {
- if (isNullOrEmpty(column.joinTable)) {
- s"\t${SqlDialect.surrogateKey(dwdModding.dwdTableConfig.sourceType)} as ${quote(getTargetColumn(column), sourceType)}"
- } else {
- ""
- }
- } else if (isNullOrEmpty(column.extraColumnExpression)) {
- s"\t${quote(getSourceColumn(column), sourceType)} as ${quote(getTargetColumn(column), sourceType)}"
- } else {
- if (isNullOrEmpty(getTargetColumn(column))) {
- "" // extract from fact col to dim col, so we didn't need select it here, we could use the expr later
- } else {
- s"\t${column.extraColumnExpression} as ${quote(getTargetColumn(column), sourceType)}"
- }
- }
- }
- }
- .filterNot(isNullOrEmpty)
- .mkString(s",$ENTER")
-
- val partitionCols = dwdModding.columns.filter(_.partitionColumn).map(_.sourceColumn)
-
- val whereClause = if (partitionCols.isEmpty) {
- s"${quote(jobIdColumn, sourceType)} = '$${DATA_RANGE_START}'"
- } else {
- // for year,month,day,hour,minute
- partitionCols.map(col => s"${quote(col, sourceType)} = '$${${col.toUpperCase}}'")
- .mkString("\n and ")
- }
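- // e.g. with a Hive source and partition columns (year, month) this becomes:
- //   `year` = '${YEAR}'
- //   and `month` = '${MONTH}'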
-
- val selectSql =
- s"""
- |select
- |$selectColumn
- |from ${quote(dwdModding.dwdTableConfig.sourceDb, sourceType)}.${quote(dwdModding.dwdTableConfig.sourceTable, sourceType)}
- |where $whereClause
- |$rowFilterExpression
- |""".stripMargin
- step.setSqlTemplate(selectSql)
-
-
- steps.append(step)
- steps.toList
- }
-
- def getSourceColumn(column: DwdModelingColumn): String = {
- if (isNullOrEmpty(column.sourceColumn)) "" else column.sourceColumn
- }
-
- def getTargetColumn(column: DwdModelingColumn): String = {
- if (isNullOrEmpty(column.targetColumn)) getSourceColumn(column) else column.targetColumn
- }
-
-}
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdLoadSqlGen.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdLoadSqlGen.scala
deleted file mode 100644
index bd3016f..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdLoadSqlGen.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.DwdModeling
-import com.google.gson.JsonObject
-import com.github.sharpdata.sharpetl.core.datasource.config.{DBDataSourceConfig, TransformationDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.{IncrementalType, LoadType, DataSourceType}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{canNotBeEmpty, isNullOrEmpty}
-import DwdExtractSqlGen.ZIP_ID_FLAG
-
-import scala.collection.mutable
-
-object DwdLoadSqlGen {
- def genLoadStep(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int): List[WorkflowStep] = {
- // slowChanging comes straight from the table config; the fact/dim flag does not decide it.
- val isSlowChanging = dwdModding.dwdTableConfig.slowChanging
-
- val currentDb = dwdModding.dwdTableConfig.targetDb
- val currentDbType = dwdModding.dwdTableConfig.targetType
- val currentTable = dwdModding.dwdTableConfig.targetTable
-
- val updateTable = steps.last.target.asInstanceOf[DBDataSourceConfig].tableName
-
- val sourceType: String = dwdModding.dwdTableConfig.sourceType
-
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new TransformationDataSourceConfig
- sourceConfig.setDataSourceType(DataSourceType.TRANSFORMATION)
- sourceConfig.className = "com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer"
- sourceConfig.methodName = "transform"
- // TODO check: every targetColumn must have a targetColumnType; the zipIdColumn is excluded here
- val columnTypeMap = new JsonObject()
- dwdModding.columns
- .filterNot(column => isNullOrEmpty(column.targetColumn))
- .filterNot(column => ZIP_ID_FLAG.equals(column.extraColumnExpression) && isNullOrEmpty(column.joinTable))
- .foreach {
- column => {
- columnTypeMap.addProperty(column.targetColumn, column.targetColumnType)
- }
- }
-
- sourceConfig.args = Map(
- canNotBeEmpty("updateType", if (dwdModding.dwdTableConfig.loadType == "full") LoadType.FULL else LoadType.INCREMENTAL),
- canNotBeEmpty("slowChanging", isSlowChanging.toString),
- canNotBeEmpty("updateTable", updateTable),
- canNotBeEmpty("currentDb", currentDb),
- canNotBeEmpty("currentDbType", currentDbType),
- canNotBeEmpty("currentTable", currentTable),
- canNotBeEmpty("primaryFields", dwdModding.columns.filter(_.logicPrimaryColumn).map(_.targetColumn).mkString(",")),
- // TODO create time and update time do not handle the multi-column case
- canNotBeEmpty("businessCreateTime", dwdModding.columns.filter(_.businessCreateTime).map(_.targetColumn).head),
- canNotBeEmpty("businessUpdateTime", dwdModding.columns.filter(_.businessUpdateTime).map(_.targetColumn).head),
- canNotBeEmpty("currentTableColumnsAndType", columnTypeMap.toString())
- )
- // if (isSlowChanging) {
- // // TODO check: for slow-changing dims there must be exactly one column flagged zip_id_flag whose joinTable is empty
- // source.args += ("currentTableZipColumn" -> dwdModding.columns
- // .filter(it => ZIP_ID_FLAG.equals(it.extraColumnExpression) && isNullOrEmpty(it.joinTable))
- // .map(_.targetColumn).head)
- // }
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.DO_NOTHING)
- step.setTargetConfig(targetConfig)
- steps :+ step
- }
-
-}
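
For reference, the currentTableColumnsAndType argument above is a JSON object built with Gson. A self-contained sketch of that encoding (the column names and types are made up):

    import com.google.gson.JsonObject

    case class Col(targetColumn: String, targetColumnType: String) // hypothetical stand-in

    val columnTypeMap = new JsonObject()
    List(Col("order_id", "string"), Col("price", "decimal(10,2)"))
      .foreach(c => columnTypeMap.addProperty(c.targetColumn, c.targetColumnType))

    println(columnTypeMap.toString) // {"order_id":"string","price":"decimal(10,2)"}
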
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdTransformSqlGen.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdTransformSqlGen.scala
deleted file mode 100644
index a81f515..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdTransformSqlGen.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.DwdModeling
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.{HIVE, SPARK_SQL}
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.Constants.{IncrementalType, DataSourceType, WriteMode}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{getTempName, isNullOrEmpty}
-import com.github.sharpdata.sharpetl.modeling.sql.dialect.SqlDialect.quote
-import DwdExtractSqlGen.{ZIP_ID_FLAG, getTargetColumn}
-
-import scala.collection.mutable
-
-object DwdTransformSqlGen {
- def genTargetSelectStep(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int): List[WorkflowStep] = {
- val sourceType: String = SPARK_SQL
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new DBDataSourceConfig
- sourceConfig.setDataSourceType(DataSourceType.TEMP)
- val sourceTable = steps.last.target.asInstanceOf[DBDataSourceConfig].tableName
- sourceConfig.setTableName(sourceTable)
-
- if (dwdModding.columns.exists(column => !isNullOrEmpty(column.qualityCheckRules))) {
- val options = mutable.Map[String, String]()
- val logicPrimaryColumn = dwdModding.columns.filter(_.logicPrimaryColumn).map(_.targetColumn).mkString(",")
- options.put("idColumn", logicPrimaryColumn)
- dwdModding.columns.filterNot(column => isNullOrEmpty(column.qualityCheckRules)).foreach(column => {
- options.put(s"column.${getTargetColumn(column)}.qualityCheckRules", column.qualityCheckRules)
- })
- sourceConfig.setOptions(options.toMap)
- }
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.TEMP)
- targetConfig.setTableName(getTempName(sourceTable, "target_selected"))
- step.setTargetConfig(targetConfig)
-
- step.setWriteMode(WriteMode.OVER_WRITE)
-
- val autoCreatedClause = if (dwdModding.dwdTableConfig.factOrDim.toLowerCase == "dim") {
- s"\t'0' as ${quote("is_auto_created", sourceType)}"
- } else {
- ""
- }
-
- var selectColumn = ""
- if (dwdModding.dwdTableConfig.targetType == HIVE) {
- selectColumn = (dwdModding.columns
- .filterNot(it => isNullOrEmpty(it.targetColumn))
- .map {
- column => {
- // no need for `as` because all columns already carry the target column name
- s"\t${quote(getTargetColumn(column), sourceType)}"
- }
- } :+ autoCreatedClause)
- .filterNot(isNullOrEmpty)
- .mkString(s",$ENTER")
- } else {
- selectColumn = (dwdModding.columns
- .filterNot(it => isNullOrEmpty(it.targetColumn))
- .filterNot(it => isNullOrEmpty(it.joinTable) && ZIP_ID_FLAG.equals(it.extraColumnExpression))
- .map {
- column => {
- // no need for `as` because all columns already carry the target column name
- s"\t${quote(getTargetColumn(column), sourceType)}"
- }
- } :+ autoCreatedClause)
- .filterNot(isNullOrEmpty)
- .mkString(s",$ENTER")
- }
-
- val selectSql =
- s"""
- |select
- |$selectColumn
- |from ${quote(sourceTable, sourceType)}
- |""".stripMargin
-
- step.setSqlTemplate(selectSql)
-
- steps :+ step
- }
-}
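
The projection above quotes each surviving target column and joins them with a comma plus newline. A sketch of the same mkString pattern, assuming ENTER is a plain newline (as the Hive branch suggests) and backtick quoting:

    val ENTER = "\n" // assumption: the Separator.ENTER constant is a plain newline

    val quoted = List("order_id", "user_id").map(c => s"\t`$c`")
    val autoCreatedClause = "" // empty for fact tables, "'0' as `is_auto_created`" for dims

    val selectColumn = (quoted :+ autoCreatedClause)
      .filterNot(_.isEmpty)
      .mkString(s",$ENTER")
    // selectColumn ==
    // 	`order_id`,
    // 	`user_id`
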
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdTransformSqlGen2.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdTransformSqlGen2.scala
deleted file mode 100644
index d32d210..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdTransformSqlGen2.scala
+++ /dev/null
@@ -1,204 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.DwdModeling
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.HIVE
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.Constants.{IncrementalType, DataSourceType, WriteMode}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{getTempName, isNullOrEmpty}
-import com.github.sharpdata.sharpetl.modeling.sql.dialect.SqlDialect.quote
-import AutoCreateDimSqlGen.joinColOrSourceCol
-import DwdExtractSqlGen.{ZIP_ID_FLAG, getTargetColumn}
-
-object DwdTransformSqlGen2 {
- def generateReadMatchTableStep(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int): (List[WorkflowStep], Int) = {
- var index = stepIndex - 1 // we will add it back in the loop
- val stepsTemp: List[WorkflowStep] = dwdModding.columns
- // TODO check: joinDbType, joinDb and joinTable must be either all set or all empty
- .filter(column => !isNullOrEmpty(column.joinTable))
- .groupBy(column => (column.joinDbType, column.joinDb, column.joinTable))
- .toList
- .sortBy(_._1._3)
- .map {
- case ((joinDbType, joinDb, joinTable), dimColumns) => {
- val step = new WorkflowStep
- index = index + 1
- step.setStep(index.toString)
-
- val sourceConfig = new DBDataSourceConfig
- sourceConfig.setDataSourceType(dimColumns.head.joinDbType)
- sourceConfig.setDbName(joinDb)
- sourceConfig.setTableName(joinTable)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.TEMP)
- targetConfig.setTableName(getTempName(joinDb + "_" + joinTable, "matched"))
- step.setTargetConfig(targetConfig)
- step.setWriteMode(WriteMode.APPEND)
- val selectColumn = dimColumns
- .map {
- column => {
- if (ZIP_ID_FLAG.equals(column.extraColumnExpression)) {
- quote(column.joinTableColumn, joinDbType)
- } else if (!isNullOrEmpty(column.joinOn)) {
- quote(column.joinOn, joinDbType)
- } else {
- ""
- }
- }
- }
- .filterNot(isNullOrEmpty)
- .mkString(", ")
-
- val selectSql =
- s"""
- |select
- | $selectColumn, ${quote("start_time", joinDbType)}, ${quote("end_time", joinDbType)}
- |from ${quote(joinDb, joinDbType)}.${quote(joinTable, joinDbType)}
- |""".stripMargin
- step.setSqlTemplate(selectSql)
-
- step
- }
- }
- (steps ++ stepsTemp, index + 1)
- }
-
- def genMatchStep(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int): (List[WorkflowStep], Int) = {
- val extractTableName = steps.head.target.asInstanceOf[DBDataSourceConfig].tableName
- val extractTableNameDialect = quote(extractTableName, HIVE)
- val businessTimeColumnName = getTargetColumn(dwdModding.columns.filter(column => column.businessCreateTime).head)
-
- val surrogateKeys = dwdModding.columns.filter(_.extraColumnExpression == ZIP_ID_FLAG).map(_.targetColumn).mkString("|")
-
- val index = stepIndex
- val sqlParts = dwdModding.columns
- .filterNot(column => isNullOrEmpty(column.joinTable))
- .groupBy(column => (column.joinDbType, column.joinDb, column.joinTable))
- .toList
- .sortBy(_._1._3)
- .map {
- case ((_, joinDb, joinTable), dimColumns) => {
- val joinTableName = quote(getTempName(joinDb + "_" + joinTable, "matched"), HIVE)
-
- val selectColumn = dimColumns
- .filter(column => ZIP_ID_FLAG.equals(column.extraColumnExpression))
- .head
- val selectColumnName = quote(selectColumn.joinTableColumn, HIVE)
- val selectTargetColumnName = quote(selectColumn.targetColumn, HIVE)
- val selectStr =
- s"""case when $joinTableName.$selectColumnName is null then '-1'
- |\t\telse $joinTableName.$selectColumnName end as $selectTargetColumnName""".stripMargin
-
- val joinOnColumn = dimColumns
- .filter(column => !isNullOrEmpty(column.joinOn))
- .head
- val joinOnStr =
- s"""left join $joinTableName
- | on $extractTableNameDialect.${quote(getTargetColumn(joinOnColumn), HIVE)} = $joinTableName.${quote(joinOnColumn.joinOn, HIVE)}
- | and $extractTableNameDialect.${quote(businessTimeColumnName, HIVE)} >= $joinTableName.${quote("start_time", HIVE)}
- | and ($extractTableNameDialect.${quote(businessTimeColumnName, HIVE)} < $joinTableName.${quote("end_time", HIVE)}
- | or $joinTableName.${quote("end_time", HIVE)} is null)
- |""".stripMargin
- (selectStr, joinOnStr)
- }
- }
- if (sqlParts.isEmpty) {
- (steps, index)
- } else {
- val step = new WorkflowStep
- step.setStep(index.toString)
-
- val sourceConfig = new DBDataSourceConfig
- sourceConfig.setDataSourceType(DataSourceType.TEMP)
- sourceConfig.setTableName(extractTableName)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.TEMP)
- targetConfig.setTableName(getTempName(sourceConfig.tableName, "joined"))
- step.setTargetConfig(targetConfig)
- step.setWriteMode(WriteMode.APPEND)
- val selectSql =
- s"""select
- |\t$extractTableNameDialect.*,
- |\t${sqlParts.map(o => o._1).mkString(",\n\t")}
- |from $extractTableNameDialect
- |${sqlParts.map(o => o._2).mkString(ENTER)}
- |""".stripMargin
- step.setSqlTemplate(selectSql)
-
- (steps :+ step, index + 1)
- }
- }
-
-
- def genMatchStepInHive(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int): (List[WorkflowStep], Int) = {
- val extractTableName = steps.head.target.asInstanceOf[DBDataSourceConfig].tableName
- val extractTableNameDialect = quote(extractTableName, HIVE)
- val businessTimeColumnName = getTargetColumn(dwdModding.columns.filter(column => column.businessCreateTime).head)
-
- val index = stepIndex
- val sqlParts = dwdModding.columns
- .filterNot(column => isNullOrEmpty(column.joinTable))
- .groupBy(column => (column.joinDbType, column.joinDb, column.joinTable))
- .toList
- .sortBy(_._1._3)
- .map {
- case ((_, joinDb, joinTable), dimColumns) => {
- val joinTableName = quote(joinTable, HIVE)
-
- val selectColumn = dimColumns
- .filter(column => ZIP_ID_FLAG.equals(column.extraColumnExpression))
- .head
- val selectColumnName = quote(selectColumn.targetColumn, HIVE)
- val selectStr =
- s"""case when $joinTableName.${quote(joinColOrSourceCol(selectColumn), HIVE)} is null then '-1'
- |\t\telse $joinTableName.${quote(joinColOrSourceCol(selectColumn), HIVE)}
- |\tend as $selectColumnName""".stripMargin
-
- val joinOnColumn = dimColumns
- .filter(column => !isNullOrEmpty(column.joinOn))
- .head
- val joinOnStr =
- s"""left join ${quote(joinDb, HIVE)}.$joinTableName $joinTableName
- | on $extractTableNameDialect.${quote(getTargetColumn(joinOnColumn), HIVE)} = $joinTableName.${quote(joinOnColumn.joinOn, HIVE)}
- | and $extractTableNameDialect.${quote(businessTimeColumnName, HIVE)} >= $joinTableName.${quote("start_time", HIVE)}
- | and ($extractTableNameDialect.${quote(businessTimeColumnName, HIVE)} < $joinTableName.${quote("end_time", HIVE)}
- | or $joinTableName.${quote("end_time", HIVE)} is null)
- |""".stripMargin
- (selectStr, joinOnStr)
- }
- }
- if (sqlParts.isEmpty) {
- (steps, index)
- } else {
- val step = new WorkflowStep
- step.setStep(index.toString)
-
- val sourceConfig = new DBDataSourceConfig
- sourceConfig.setDataSourceType(DataSourceType.TEMP)
- sourceConfig.setTableName(extractTableName)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.TEMP)
- targetConfig.setTableName(getTempName(sourceConfig.tableName, "joined"))
- step.setTargetConfig(targetConfig)
- step.setWriteMode(WriteMode.APPEND)
- val selectSql =
- s"""select
- |\t$extractTableNameDialect.*,
- |\t${sqlParts.map(o => o._1).mkString(",\n\t")}
- |from $extractTableNameDialect
- |${sqlParts.map(o => o._2).mkString(ENTER)}
- |""".stripMargin
- step.setSqlTemplate(selectSql)
-
- (steps :+ step, index + 1)
- }
- }
-}
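
Both match variants above group the configured columns by (joinDbType, joinDb, joinTable) and sort by table name, so that one read step is emitted per distinct dimension and step numbering stays deterministic. A sketch of that grouping on made-up columns:

    case class Col(joinDbType: String, joinDb: String, joinTable: String) // hypothetical stand-in

    val cols = List(
      Col("hive", "dim", "t_dim_user"),
      Col("hive", "dim", "t_dim_user"), // same dim referenced twice -> one read step
      Col("hive", "dim", "t_dim_class")
    )

    val grouped = cols
      .groupBy(c => (c.joinDbType, c.joinDb, c.joinTable))
      .toList
      .sortBy(_._1._3) // order by join table name
    // => ((hive,dim,t_dim_class), 1 column), ((hive,dim,t_dim_user), 2 columns)
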
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdWorkflowGen.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdWorkflowGen.scala
deleted file mode 100644
index 5a901ee..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/DwdWorkflowGen.scala
+++ /dev/null
@@ -1,169 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.{CreateDimMode, DimTable, DwdModeling, DwdTableConfig, FactOrDim}
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.syntax.{Workflow, WorkflowStep}
-import com.github.sharpdata.sharpetl.core.util.Constants.LoadType._
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType._
-import com.github.sharpdata.sharpetl.modeling.excel.model._
-import AutoCreateDimSqlGen._
-import AutoCreateDimSqlGen2.genAutoCreateDimStep
-import DwdExtractSqlGen._
-import DwdLoadSqlGen.genLoadStep
-import DwdTransformSqlGen.genTargetSelectStep
-import DwdTransformSqlGen2._
-import ScdSqlGen._
-
-// scalastyle:off
-object DwdWorkflowGen {
- def genWorkflow(dwdModding: DwdModeling, workflowName: String): Workflow = {
- var steps: List[WorkflowStep] = List.empty
- var index = 1
- val dwdTableConfig: DwdTableConfig = dwdModding.dwdTableConfig
- // 1. [create step, target: temp]: pull data from the previous layer into a temp table
- // 1.1 apply the row filter
- // 1.2 add the extra derived columns
- // note: no column filtering yet; later steps still need the full column set
- steps = genExtractStep(dwdModding, index)
- index += 1
-
- // 2. join with master data
- // 2.1 branch on create_dim_mode
- val createDimModeToCols = dwdModding.columns.filterNot(it => isNullOrEmpty(it.joinTable)).groupBy(_.createDimMode)
-
- // 2.1.1 always: [create step, target: postgres]:
- // sort ascending by business_create_time, deduplicate on the columns named in join_table_column, then slow-change each row in turn (note: mark the rows as originating from a dimension table)
- // 2.1.2 once: [create step, target: postgres]:
- // sort ascending by business_create_time and take the first row, project the columns named in join_table_column, then insert that row into the dim table (note: check whether the row already exists, and mark it as originating from a dimension table)
- val partitionCols = dwdModding.columns.filter(_.partitionColumn)
- val updateTimeCols = dwdModding.columns.filter(_.businessUpdateTime)
- val createTimeCols = dwdModding.columns.filter(_.businessCreateTime)
- val nonNeverCreateDimTables: Seq[DimTable] = (
- createDimModeToCols
- .getOrElse(CreateDimMode.ALWAYS, Seq()) ++
- createDimModeToCols
- .getOrElse(CreateDimMode.ONCE, Seq())
- )
- .groupBy(_.joinTable)
- .map { case (dimTable, cols) => DimTable(dimTable, cols, partitionCols, updateTimeCols, createTimeCols) }
- .toSeq
-
- if (nonNeverCreateDimTables.nonEmpty && dwdTableConfig.targetType == HIVE) {
- val extractTempTableName = steps.head.target.asInstanceOf[DBDataSourceConfig].tableName
- val sql = genGroupedDimensionSql(nonNeverCreateDimTables, extractTempTableName, dwdTableConfig.sourceTable)
- steps = tmpStepToTempStep(steps, index, extractTempTableName, "grouped_dim", sql)
- index += 1
-
- val groupedDimTempTable = steps.last.target.asInstanceOf[DBDataSourceConfig].tableName
-
- nonNeverCreateDimTables.sortBy(_.dimTable).foreach { dimTable =>
- steps = tmpStepToTempStep(steps, index, groupedDimTempTable, "selected_dim", parseDimensionSql(groupedDimTempTable, dimTable))
- val dimView = steps.last.target.asInstanceOf[DBDataSourceConfig].tableName
- index += 1
- steps = genDwdPartitionClauseStep(steps, dimTable, index)
- index += 1
- steps = genDwdViewStep(steps, dimTable, index)
- val dwView = steps.last.target.asInstanceOf[DBDataSourceConfig].tableName
- index += 1
- steps = genScdStep(steps, dwdModding, dimTable, index, dimView, dwView)
- index += 1
- }
- }
-
- // 2.2 join with master data [create step, target: temp] to fetch ids: using the source_column/join_on mapping,
- // join each configured join_table, additionally constraining the business time business_create_time to fall between the dimension's start_time and end_time;
- // pick up the join_table_column value whose target_column is flagged 'zip_id_flag' in extra_column_expression (the zipId is named after the target_column)
- // note: this step only fetches the zipId of each dimension row
- if (dwdTableConfig.targetType != HIVE) {
- val (autoCreateDimTableSteps, autoCreateDimNextTempIndex) = genAutoCreateDimStep(steps, dwdModding, index)
- steps = autoCreateDimTableSteps
- index = autoCreateDimNextTempIndex
- val (readMatchTableSteps, nextTempIndex) = generateReadMatchTableStep(steps, dwdModding, index)
- steps = readMatchTableSteps
- index = nextTempIndex
- }
-
- val (matchStep, nextIndex) = if (dwdTableConfig.targetType == HIVE) genMatchStepInHive(steps, dwdModding, index) else genMatchStep(steps, dwdModding, index)
- steps = matchStep
- index = nextIndex
-
- // 3. quality checks
- // 3.1 missing-logic-primary-key check [create step, target: temp]; first decide whether the logic primary key contains dimension data (i.e. whether any column flagged logic_primary_column also has join_table set)
- // 3.1.1 no dimension data in the key: null-check the key columns
- // 3.1.2 dimension data in the key: null-check the non-dimension key columns and check the dimension columns for -1 (note: pin down whether that is -1 or '-1')
- // 3.2 persist rows with a missing key [create step, target: postgres]
- // 3.3 drop rows with a missing key [create step, target: temp]
- // 3.4 duplicated-logic-primary-key check [create step, target: temp]
- // 3.5 persist rows with a duplicated key [create step, target: postgres]
- // 3.6 drop rows with a duplicated key [create step, target: temp]
- // 3.7 check + persist rows missing important fields [no such config today; can be treated as a custom business extension, nothing to configure]
- // 3.8 check rows that fail to join master data [create step, target: temp] (note: fact-to-fact joins are not checked for now)
- // 3.9 persist rows that fail to join master data [create step, target: postgres]
-
- // 4. regular processing
- // 4.1 drop the surplus columns [create step, target: temp]
- steps = genTargetSelectStep(steps, dwdModding, index)
- index += 1
-
- // 5. load [the scenarios are complex; only the cases noted below are handled for now]
- // 5.1 full + slow-changing: derive the delete/update/insert change sets and apply SCD writes keyed on the logic primary key [create step, target: xxx]
- // 5.2 full + non-slow-changing: truncate + insert [create step, target: xxx]
- // 5.3 incremental + slow-changing: SCD writes keyed on the logic primary key [create step, target: xxx]
- // 5.4 incremental + non-slow-changing: updates keyed on the logic primary key [create step, target: xxx]
-
- // HIVE/INCREMENTAL/DIM/slowChanging
- if (dwdTableConfig.targetType == HIVE && dwdTableConfig.loadType == INCREMENTAL
- && dwdTableConfig.factOrDim == FactOrDim.DIM && dwdTableConfig.slowChanging) {
- steps = genDwdPartitionClauseStep(steps, dwdModding, index)
- index += 1
- steps = genDwdViewStep(steps, dwdModding, index)
- index += 1
- steps = genScdStep(steps, dwdModding, index)
- index += 1
- } // HIVE/INCREMENTAL/FACT/slowChanging
- else if (dwdTableConfig.targetType == HIVE && dwdTableConfig.loadType == INCREMENTAL
- && dwdTableConfig.factOrDim == FactOrDim.FACT && dwdTableConfig.slowChanging) {
- steps = genDwdPartitionClauseStep(steps, dwdModding, index)
- index += 1
- steps = genDwdViewStep(steps, dwdModding, index)
- index += 1
- steps = genScdStep(steps, dwdModding, index)
- index += 1
- } // HIVE/INCREMENTAL/FACT/no slowChanging
- else if (dwdTableConfig.targetType == HIVE && dwdTableConfig.loadType == INCREMENTAL
- && dwdTableConfig.factOrDim == FactOrDim.FACT && !dwdTableConfig.slowChanging) {
- steps = genDwdPartitionClauseStep(steps, dwdModding, index, isSCD = false)
- index += 1
- steps = genDwdViewStep(steps, dwdModding, index)
- index += 1
- steps = genScdStep(steps, dwdModding, index, isSCD = false)
- index += 1
- }
- else {
- steps = genLoadStep(steps, dwdModding, index)
- }
- Workflow(workflowName, "", dwdTableConfig.loadType,
- dwdTableConfig.logDrivenType, dwdTableConfig.upstream,
- dwdTableConfig.dependsOn, null, 0, dwdTableConfig.defaultStart, false, null, Map(), steps
- )
- // scalastyle:on
-
- /**
- * Notes on the load logic.
- * Question: is "full" data over a given time window truly full? If it is actually incremental,
- * it has to be updated by logic primary key (which is problematic); if it is truly full, it can
- * be updated by time window. Late-arriving dimensions are a related open problem.
- * Full (truly full; if "full" means something else, first backfill the data into a full set via a custom extension)
- *   slow-changing:
- *     apply SCD on the business update_time (create_time is not used in the table). Note: if an incoming row's time is greater than the table's max start_time, fail fast (or cancel the update and alert by mail? and if cancelled, per row or per batch?)
- *   non-slow-changing:
- *     truncate the whole table and insert the new data (rows whose update time is older than the table's can be ignored for now; handling them would introduce the notion of an export time for the full batch and make configuration more complex)
- * Incremental
- *   slow-changing:
- *     apply SCD by logic primary key on the business update_time (create_time is not used in the table). Note: same failure/cancellation question as above.
- *   non-slow-changing:
- *     update by logic primary key on the business update_time; if an incoming row is older than the row in the table, cancel the update (so old data never overwrites new data)
- */
- // TODO: consider splitting steps into finer-grained units, so that custom SQL is easier to edit later and the resulting temp tables can be reused by later steps. This convention is still to be confirmed.
- }
-}
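
A compact restatement of the load branching at step 5 (a sketch only, with lower-cased placeholder constants; the real code compares against the Constants enums):

    def loadStrategy(targetType: String, loadType: String, factOrDim: String, slowChanging: Boolean): String =
      (targetType, loadType, factOrDim, slowChanging) match {
        case ("hive", "incremental", _, true)       => "partition clause + dwd view + SCD transformer"
        case ("hive", "incremental", "fact", false) => "partition clause + dwd view + non-SCD transformer"
        case _                                      => "generic JdbcLoadTransformer load step"
      }
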
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGen.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGen.scala
deleted file mode 100644
index 9dde899..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGen.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.OdsTable.{OdsModeling, OdsModelingColumn}
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.datasource.config._
-import com.github.sharpdata.sharpetl.core.syntax.{Workflow, WorkflowStep}
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.HIVE
-import com.github.sharpdata.sharpetl.core.util.Constants.IncrementalType._
-import com.github.sharpdata.sharpetl.core.util.Constants.LoadType._
-import com.github.sharpdata.sharpetl.core.util.Constants.WriteMode
-import com.github.sharpdata.sharpetl.core.util.ETLConfig.partitionColumn
-import com.github.sharpdata.sharpetl.modeling.sql.dialect.SqlDialect.{getSqlDialect, quote}
-
-object OdsWorkflowGen {
-
- def genWorkflow(odsModeling: OdsModeling, workflowName: String): Workflow = {
- val step = new WorkflowStep()
- step.step = "1"
- step.source = getDataSourceConfig(odsModeling)
- step.target = getTargetSourceConfig(odsModeling)
-
-
- val dataSourceType = odsModeling.odsTableConfig.sourceType
- val additionalCols = List(s"'$${JOB_ID}' AS ${quote("job_id", dataSourceType)}")
- val columns = buildColumnString(odsModeling, additionalCols)
- val sourceDb = quote(odsModeling.odsTableConfig.sourceDb, dataSourceType)
- val sourceTable = quote(odsModeling.odsTableConfig.sourceTable, dataSourceType)
- val rowFilterExpression = if (isNullOrEmpty(odsModeling.odsTableConfig.filterExpression)) "" else "AND " + odsModeling.odsTableConfig.filterExpression
- val steps = odsModeling.odsTableConfig.loadType match {
- case INCREMENTAL =>
- step.writeMode = if (dataSourceType == HIVE) WriteMode.OVER_WRITE else WriteMode.APPEND
- val filterColumnName = quote(incrColumn(odsModeling), dataSourceType)
- val partitionClause = genPartitionClause(odsModeling)
-
- step.sqlTemplate =
- s"""|SELECT $columns$partitionClause
- |FROM $sourceDb.$sourceTable
- |WHERE $filterColumnName >= '$${DATA_RANGE_START}' AND $filterColumnName < '$${DATA_RANGE_END}'
- |$rowFilterExpression
- |""".stripMargin
- List(step)
- case FULL | DIFF =>
- step.writeMode = WriteMode.OVER_WRITE
- step.sqlTemplate =
- s"""|SELECT $columns,\n '$${DATA_RANGE_START}' AS $partitionColumn
- |FROM $sourceDb.$sourceTable
- |$rowFilterExpression
- |""".stripMargin
- List(step)
- case AUTO_INC_ID =>
- step.target = new VariableDataSourceConfig()
- val idColumn = odsModeling.columns.filter(_.primaryKeyColumn).head.sourceColumn
- step.sqlTemplate =
- s"""
- |SELECT $${DATA_RANGE_START} AS ${quote("lowerBound", dataSourceType)},
- | MAX(${quote(idColumn, dataSourceType)}) AS ${quote("upperBound", dataSourceType)}
- |FROM $sourceDb.$sourceTable
- |$rowFilterExpression
- |""".stripMargin
-
- val stepRead = new WorkflowStep()
- stepRead.step = "2"
- stepRead.source = getDataSourceConfig(odsModeling)
- stepRead.source.asInstanceOf[DBDataSourceConfig].numPartitions = "4"
- stepRead.source.asInstanceOf[DBDataSourceConfig].lowerBound = "${lowerBound}"
- stepRead.source.asInstanceOf[DBDataSourceConfig].upperBound = "${upperBound}"
- stepRead.source.asInstanceOf[DBDataSourceConfig].partitionColumn = idColumn
- stepRead.target = getTargetSourceConfig(odsModeling)
- stepRead.writeMode = WriteMode.UPSERT // in case of partial failure
- stepRead.sqlTemplate =
- s"""|SELECT $columns
- |FROM $sourceDb.$sourceTable
- |WHERE ${quote(idColumn, dataSourceType)} > $${lowerBound}
- | AND ${quote(idColumn, dataSourceType)} <= $${upperBound}
- |$rowFilterExpression
- |""".stripMargin
- List(step, stepRead)
- }
- // scalastyle:off
- Workflow(workflowName, odsModeling.odsTableConfig.period, odsModeling.odsTableConfig.loadType,
- odsModeling.odsTableConfig.logDrivenType,
- odsModeling.odsTableConfig.upstream, odsModeling.odsTableConfig.dependsOn, null, 0, odsModeling.odsTableConfig.defaultStart, false, null, Map(), steps
- )
- // scalastyle:on
- }
-
-
- private def getTargetSourceConfig(odsModeling: OdsModeling): DBDataSourceConfig = {
- val targetSourceConfig = new DBDataSourceConfig()
- targetSourceConfig.dataSourceType = odsModeling.odsTableConfig.targetType
- targetSourceConfig.dbName = odsModeling.odsTableConfig.targetDb
- targetSourceConfig.tableName = odsModeling.odsTableConfig.targetTable
- targetSourceConfig
- }
-
- private def getDataSourceConfig(odsModeling: OdsModeling): DBDataSourceConfig = {
- val config = new DBDataSourceConfig()
- config.tableName = odsModeling.odsTableConfig.sourceTable
- config.dbName = odsModeling.odsTableConfig.sourceDb
- config.dataSourceType = odsModeling.odsTableConfig.sourceType
- config
- }
-
- private def buildColumnString(odsModeling: OdsModeling, additionalCols: List[String]): String = {
- (odsModeling.columns
- .map(col => getColumnAsString(col, odsModeling.odsTableConfig.sourceType)) ++ additionalCols)
- .mkString(",\n ")
- }
-
- private def getColumnAsString(col: OdsModelingColumn, sourceType: String): String = {
- val exprOrSourceCol = if (isNullOrEmpty(col.extraColumnExpression)) quote(col.sourceColumn, sourceType) else col.extraColumnExpression
- s"""$exprOrSourceCol AS ${quote(col.targetColumn, sourceType)}"""
- }
-
- private def incrColumn(odsModeling: OdsModeling): String = {
- assert(odsModeling.columns.count(c => c.incrementalColumn) == 1)
- odsModeling.columns.filter(c => c.incrementalColumn).map(_.sourceColumn).head
- }
-
-
- def genPartitionClause(odsModeling: OdsModeling): String = {
- val partitionFormat = if (isNullOrEmpty(odsModeling.odsTableConfig.partitionFormat)) "" else odsModeling.odsTableConfig.partitionFormat
- val dialect = getSqlDialect(odsModeling.odsTableConfig.sourceType)
- val partitionClause = partitionFormat match {
- case "" => ""
- case "year/month/day" =>
- val timeFormat = odsModeling.odsTableConfig.timeFormat
- val partitionField = odsModeling.columns.filter(_.incrementalColumn).head.sourceColumn
- List(
- s"${dialect.year(partitionField, timeFormat)} as ${dialect.quoteIdentifier("year")}",
- s"${dialect.month(partitionField, timeFormat)} as ${dialect.quoteIdentifier("month")}",
- s"${dialect.day(partitionField, timeFormat)} as ${dialect.quoteIdentifier("day")}"
- ).mkString(",\n ")
- case _ => ??? // only "year/month/day" is implemented so far
- }
- if (isNullOrEmpty(partitionClause)) {
- ""
- } else {
- s""",\n $partitionClause""".stripMargin
- }
- }
-
-}
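
In the AUTO_INC_ID branch above, step 1 resolves the id window into the ${lowerBound}/${upperBound} variables and step 2 reads it back with numPartitions = 4. Roughly how a JDBC reader splits such a window (a simplified re-derivation, not the actual Spark code):

    // Split (lower, upper] into n contiguous id ranges, one per JDBC partition.
    def splitRanges(lower: Long, upper: Long, n: Int): Seq[(Long, Long)] = {
      val stride = math.max((upper - lower) / n, 1L)
      (0 until n).map { i =>
        val lo = lower + i * stride
        val hi = if (i == n - 1) upper else lower + (i + 1) * stride
        (lo, hi)
      }
    }

    // splitRanges(0L, 100L, 4) == Vector((0,25), (25,50), (50,75), (75,100))
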
diff --git a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/ScdSqlGen.scala b/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/ScdSqlGen.scala
deleted file mode 100644
index 548ad4d..0000000
--- a/data-modeling/src/main/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/ScdSqlGen.scala
+++ /dev/null
@@ -1,304 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.{DimTable, DwdModeling}
-import com.github.sharpdata.sharpetl.core.datasource.config.{DBDataSourceConfig, TransformationDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.SPARK_SQL
-import com.github.sharpdata.sharpetl.core.util.Constants.{IncrementalType, LoadType, DataSourceType, WriteMode}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{canNotBeEmpty, getTempName, isNullOrEmpty}
-import com.github.sharpdata.sharpetl.modeling.sql.dialect.SqlDialect.quote
-import AutoCreateDimSqlGen.targetColOrSourceCol
-import DwdExtractSqlGen.ZIP_ID_FLAG
-
-import scala.collection.mutable
-
-object ScdSqlGen {
- def genScdStep(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int, isSCD: Boolean = true): List[WorkflowStep] = {
- val dwView = steps.last.target.asInstanceOf[DBDataSourceConfig].tableName
-
- val sourceType: String = dwdModding.dwdTableConfig.sourceType
-
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new TransformationDataSourceConfig
- sourceConfig.setDataSourceType(DataSourceType.TRANSFORMATION)
- sourceConfig.className = s"com.github.sharpdata.sharpetl.spark.transformation.${if (isSCD) "SCDTransformer" else "NonSCDTransformer"}"
- sourceConfig.methodName = "transform"
- sourceConfig.args = Map(
- canNotBeEmpty("odsViewName", s"${dwdModding.dwdTableConfig.sourceTable}__target_selected"),
- canNotBeEmpty("dwViewName", dwView),
- canNotBeEmpty("primaryFields", dwdModding.columns.filter(_.logicPrimaryColumn).map(_.targetColumn).mkString(",")),
- canNotBeEmpty("partitionField", dwdModding.columns.filter(_.businessCreateTime).map(_.targetColumn).mkString(",")),
- canNotBeEmpty("partitionFormat", dwdModding.columns.filter(_.partitionColumn).map(_.targetColumn).mkString("/")),
- canNotBeEmpty("updateTimeField", dwdModding.columns.filter(_.businessUpdateTime).map(_.targetColumn).head),
- canNotBeEmpty("createTimeField", dwdModding.columns.filter(_.businessCreateTime).map(_.targetColumn).head),
- canNotBeEmpty("dwUpdateType", if (dwdModding.dwdTableConfig.loadType == "full") LoadType.FULL else LoadType.INCREMENTAL),
- canNotBeEmpty("timeFormat", "yyyy-MM-dd HH:mm:ss"),
- ("surrogateField", getSurrogateField(dwdModding, dwdModding.dwdTableConfig.targetTable))
- )
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(sourceType)
- targetConfig.setDbName(dwdModding.dwdTableConfig.targetDb)
- targetConfig.setTableName(dwdModding.dwdTableConfig.targetTable)
- step.setTargetConfig(targetConfig)
-
- step.writeMode = WriteMode.OVER_WRITE
-
-
- steps :+ step
- }
-
- def genDwdViewStep(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int): List[WorkflowStep] = {
- val sourceType: String = SPARK_SQL
-
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new DBDataSourceConfig
- sourceConfig.setDataSourceType(dwdModding.dwdTableConfig.targetType)
- sourceConfig.setDbName(dwdModding.dwdTableConfig.targetDb)
- sourceConfig.setTableName(dwdModding.dwdTableConfig.targetTable)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.TEMP)
- targetConfig.setTableName(getTempName(sourceConfig.tableName, "changed_partition_view"))
- step.setTargetConfig(targetConfig)
-
- val selectSql =
- s"""
- |select *
- |from ${quote(sourceConfig.dbName, sourceType)}.${quote(sourceConfig.tableName, sourceType)}
- |$${DWD_UPDATED_PARTITION}
- |""".stripMargin
-
- step.setSqlTemplate(selectSql)
-
-
- steps :+ step
- }
-
-
- // incremental & scd
- def genDwdPartitionClauseStep(steps: List[WorkflowStep], dwdModding: DwdModeling, stepIndex: Int, isSCD: Boolean = true): List[WorkflowStep] = {
- val sourceType: String = SPARK_SQL
-
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new DBDataSourceConfig
- sourceConfig.setDataSourceType(dwdModding.dwdTableConfig.targetType)
- sourceConfig.setDbName(dwdModding.dwdTableConfig.targetDb)
- sourceConfig.setTableName(dwdModding.dwdTableConfig.targetTable)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.VARIABLES)
- step.setTargetConfig(targetConfig)
-
- val selectTargetStep = steps.last
-
- val selectTargetStepConfig = selectTargetStep.target.asInstanceOf[DBDataSourceConfig]
-
- val joinCondition = dwdModding.columns.filter(_.logicPrimaryColumn)
- .map { col =>
- s"dwd.${col.targetColumn} = incremental_data.${col.targetColumn}"
- }.mkString(" and ")
-
- val whereCondition = dwdModding.columns.filter(_.logicPrimaryColumn)
- .map { col =>
- s"incremental_data.${col.targetColumn} is not null"
- }.mkString(" and ")
-
- assert(!isNullOrEmpty(whereCondition))
-
- val partitionCondition = dwdModding.columns.filter(_.partitionColumn).map(_.targetColumn)
- .map(column => {
- s"concat('`$column` = ', `$column`)"
- })
- .mkString(", ")
-
- val selectSql =
- s"""
- |select concat('where (',
- | ifEmpty(
- | concat_ws(')\\n or (', collect_set(concat_ws(' and ', $partitionCondition))),
- | '1 = 1'),
- | ')') as `DWD_UPDATED_PARTITION`
- |from (
- | select
- | dwd.*
- | from ${quote(sourceConfig.dbName, sourceType)}.${quote(sourceConfig.tableName, sourceType)} dwd
- | left join ${quote(selectTargetStepConfig.tableName, sourceType)} incremental_data on $joinCondition
- | where $whereCondition
- | ${if (isSCD) "and dwd.is_latest = 1" else "and '1' = '1'"}
- |)
- |""".stripMargin
-
- step.setSqlTemplate(selectSql)
-
-
- steps :+ step
- }
-
- //TODO: refactor
- def genDwdPartitionClauseStep(steps: List[WorkflowStep], dimTable: DimTable, stepIndex: Int): List[WorkflowStep] = {
- val sourceType: String = SPARK_SQL
-
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new DBDataSourceConfig
- val any = dimTable.cols.head
- sourceConfig.setDataSourceType(any.joinDbType)
- sourceConfig.setDbName(any.joinDb)
- sourceConfig.setTableName(any.joinTable)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.VARIABLES)
- step.setTargetConfig(targetConfig)
-
- val selectTargetStep = steps.last
-
- val selectTargetStepConfig = selectTargetStep.target.asInstanceOf[DBDataSourceConfig]
-
- val joinCondition = dimTable.joinOnColumns
- .map { col =>
- s"dwd.${col.joinTableColumn} = incremental_data.${col.joinTableColumn}"
- }.mkString(" and ")
-
- val whereCondition = dimTable.joinOnColumns
- .map { col =>
- s"incremental_data.${col.joinTableColumn} is not null"
- }.mkString(" and ")
-
- assert(!isNullOrEmpty(whereCondition))
-
- val partitionCondition = dimTable.partitionCols.map(_.sourceColumn)
- .map(column => {
- s"concat('`$column` = ', `$column`)"
- })
- .mkString(", ")
-
- val selectSql =
- s"""
- |select concat('where (',
- | ifEmpty(
- | concat_ws(')\\n or (', collect_set(concat_ws(' and ', $partitionCondition))),
- | '1 = 1'),
- | ')') as `DWD_UPDATED_PARTITION`
- |from (
- | select dwd.*
- | from ${quote(sourceConfig.dbName, sourceType)}.${quote(sourceConfig.tableName, sourceType)} dwd
- | left join ${quote(selectTargetStepConfig.tableName, sourceType)} incremental_data on $joinCondition
- | where $whereCondition
- | and dwd.is_latest = 1
- |)
- |""".stripMargin
-
- step.setSqlTemplate(selectSql)
-
-
- steps :+ step
- }
-
-
- //TODO: refactor
- def genDwdViewStep(steps: List[WorkflowStep], dimTable: DimTable, stepIndex: Int): List[WorkflowStep] = {
- val sourceType: String = SPARK_SQL
-
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new DBDataSourceConfig
- val any = dimTable.cols.head
- sourceConfig.setDataSourceType(any.joinDbType)
- sourceConfig.setDbName(any.joinDb)
- sourceConfig.setTableName(any.joinTable)
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(DataSourceType.TEMP)
- targetConfig.setTableName(getTempName(sourceConfig.tableName, "changed_partition_view"))
- step.setTargetConfig(targetConfig)
-
- val selectSql =
- s"""
- |select *
- |from ${quote(sourceConfig.dbName, sourceType)}.${quote(sourceConfig.tableName, sourceType)}
- |$${DWD_UPDATED_PARTITION}
- |""".stripMargin
-
- step.setSqlTemplate(selectSql)
-
-
-
- steps :+ step
- }
-
- def genScdStep(steps: List[WorkflowStep],
- dwdModding: DwdModeling,
- dimTable: DimTable,
- stepIndex: Int,
- odsView: String,
- dwView: String): List[WorkflowStep] = {
-
- val any = dimTable.cols.head
-
- val step = new WorkflowStep
-
- step.setStep(stepIndex.toString)
-
- val sourceConfig = new TransformationDataSourceConfig
- sourceConfig.setDataSourceType(DataSourceType.TRANSFORMATION)
- sourceConfig.className = "com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer"
- sourceConfig.methodName = "transform"
- sourceConfig.args = Map(
- canNotBeEmpty("odsViewName", odsView),
- canNotBeEmpty("dwViewName", dwView),
- canNotBeEmpty("primaryFields", dimTable.joinOnColumns.map(_.joinTableColumn).mkString(",")),
- //canNotBeEmpty("partitionField", ""),
- //canNotBeEmpty("partitionFormat", ""),
- canNotBeEmpty("updateTimeField", dimTable.updateTimeCols.map(_.sourceColumn).head),
- canNotBeEmpty("createTimeField", dimTable.createTimeCols.map(_.sourceColumn).head),
- canNotBeEmpty("dropUpdateTimeField", "true"),
- canNotBeEmpty("dwUpdateType", if (dwdModding.dwdTableConfig.loadType == "full") LoadType.FULL else LoadType.INCREMENTAL),
- canNotBeEmpty("timeFormat", "yyyy-MM-dd HH:mm:ss"),
- ("surrogateField", getSurrogateField(dwdModding, any.joinTable))
- )
- step.setSourceConfig(sourceConfig)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDataSourceType(any.joinDbType)
- targetConfig.setDbName(any.joinDb)
- targetConfig.setTableName(any.joinTable)
- step.setTargetConfig(targetConfig)
-
- step.writeMode = WriteMode.OVER_WRITE
-
-
-
- steps :+ step
- }
-
- private def getSurrogateField(dwdModding: DwdModeling, targetTable: String): String = {
- val cols = dwdModding.columns
- .filter(it => isNullOrEmpty(it.sourceColumn) && it.extraColumnExpression == ZIP_ID_FLAG)
- .filter(it => it.joinTable == targetTable || (it.targetTable == targetTable && isNullOrEmpty(it.joinTable)))
- if (cols.isEmpty) {
- ""
- } else {
- assert(cols.size == 1)
- targetColOrSourceCol(cols.head)
- }
- }
-}
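
The DWD_UPDATED_PARTITION variable computed by genDwdPartitionClauseStep expands to a where clause enumerating only the changed partitions. A minimal re-implementation of that expansion, assuming year/month/day partitions:

    def dwdUpdatedPartition(parts: Seq[(String, String, String)]): String =
      if (parts.isEmpty) "where (1 = 1)" // matches the ifEmpty fallback in the generated SQL
      else parts
        .map { case (y, m, d) => s"`year` = $y and `month` = $m and `day` = $d" }
        .mkString("where (", ")\n or (", ")")

    // dwdUpdatedPartition(Seq(("2023","01","05"), ("2023","01","06"))) ==
    // where (`year` = 2023 and `month` = 01 and `day` = 05)
    //  or (`year` = 2023 and `month` = 01 and `day` = 06)
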
diff --git a/data-modeling/src/test/resources/application.properties b/data-modeling/src/test/resources/application.properties
deleted file mode 100644
index e17c7a5..0000000
--- a/data-modeling/src/test/resources/application.properties
+++ /dev/null
@@ -1,40 +0,0 @@
-etl.workflow.path=tasks
-
-encrypt.algorithm=PBEWithMD5AndDES
-encrypt.password=cGFzc3dvcmQ=
-
-spark.default.spark.sql.adaptive.enabled=true
-spark.default.spark.sql.adaptive.logLevel=info
-spark.default.spark.sql.adaptive.advisoryPartitionSizeInBytes=128m
-spark.default.spark.sql.adaptive.coalescePartitions.enabled=true
-spark.default.spark.sql.adaptive.coalescePartitions.minPartitionNum=1
-spark.default.spark.sql.adaptive.fetchShuffleBlocksInBatch=true
-spark.default.spark.sql.adaptive.localShuffleReader.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionFactor=5
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes=400m
-spark.default.spark.sql.adaptive.nonEmptyPartitionRatioForBroadcastJoin=0.2
-spark.default.spark.sql.autoBroadcastJoinThreshold=-1
-spark.default.spark.sql.adaptive.shuffle.targetPostShuffleInputSize=134217728
-spark.default.hive.exec.dynamic.partition=true
-spark.default.hive.exec.dynamic.partition.mode=nonstrict
-spark.default.spark.sql.sources.partitionOverwriteMode=dynamic
-
-psi.postgres.driver=org.postgresql.Driver
-psi.postgres.fetchsize=10
-psi.postgres.url=jdbc:postgresql://localhost:5432/default?currentSchema="test"
-psi.postgres.user=root
-psi.postgres.password=root
-
-kudu.table.prefix=impala::
-
-flyway.driver=com.mysql.cj.jdbc.Driver
-flyway.url=jdbc:mysql://localhost:2333/sharp_etl?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&autoReconnect=true
-flyway.username=admin
-flyway.password=ENC(8CW2DkIPNyDX+d/p1/fXyw==)
-
-int_test.mysql.url=jdbc:mysql://localhost:2334/int_test
-int_test.mysql.driver=com.mysql.cj.jdbc.Driver
-int_test.mysql.user=admin
-int_test.mysql.password=ENC(AUmnp9GPe51M6fN4ExVc+A==)
-int_test.mysql.fetchsize=1000
\ No newline at end of file
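
The ENC(...) values and the PBEWithMD5AndDES algorithm above follow the Jasypt convention. A sketch of decrypting one such value, assuming Jasypt is on the classpath and that the encrypt.password property (base64 "cGFzc3dvcmQ=") decodes to the actual key "password":

    import org.jasypt.encryption.pbe.StandardPBEStringEncryptor

    val encryptor = new StandardPBEStringEncryptor()
    encryptor.setAlgorithm("PBEWithMD5AndDES")
    encryptor.setPassword("password") // assumption: the base64-decoded encrypt.password

    // e.g. the flyway.password value from the properties above
    println(encryptor.decrypt("8CW2DkIPNyDX+d/p1/fXyw=="))
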
diff --git a/data-modeling/src/test/resources/data-dict-v2-event-hive.xlsx b/data-modeling/src/test/resources/data-dict-v2-event-hive.xlsx
deleted file mode 100644
index 1a2cd43..0000000
Binary files a/data-modeling/src/test/resources/data-dict-v2-event-hive.xlsx and /dev/null differ
diff --git a/data-modeling/src/test/resources/data-dict-v2-hive.xlsx b/data-modeling/src/test/resources/data-dict-v2-hive.xlsx
deleted file mode 100644
index db0b61e..0000000
Binary files a/data-modeling/src/test/resources/data-dict-v2-hive.xlsx and /dev/null differ
diff --git a/data-modeling/src/test/resources/data-dict-v2-postgres.xlsx b/data-modeling/src/test/resources/data-dict-v2-postgres.xlsx
deleted file mode 100644
index a2f7a21..0000000
Binary files a/data-modeling/src/test/resources/data-dict-v2-postgres.xlsx and /dev/null differ
diff --git a/data-modeling/src/test/resources/etl.key b/data-modeling/src/test/resources/etl.key
deleted file mode 100644
index aefe06c..0000000
Binary files a/data-modeling/src/test/resources/etl.key and /dev/null differ
diff --git a/data-modeling/src/test/resources/ods-template.xlsx b/data-modeling/src/test/resources/ods-template.xlsx
deleted file mode 100644
index ada9d6d..0000000
Binary files a/data-modeling/src/test/resources/ods-template.xlsx and /dev/null differ
diff --git a/data-modeling/src/test/resources/tasks/auto_create_dim.sql b/data-modeling/src/test/resources/tasks/auto_create_dim.sql
deleted file mode 100644
index c0d9746..0000000
--- a/data-modeling/src/test/resources/tasks/auto_create_dim.sql
+++ /dev/null
@@ -1,337 +0,0 @@
--- workflow=auto_create_dim
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=ods__t_order
-
--- step=1
--- source=hive
--- dbName=ods
--- tableName=t_order
--- target=temp
--- tableName=t_order__extracted
--- writeMode=overwrite
-select
- `order_id` as `order_id`,
- `order_sn` as `order_sn`,
- `product_code` as `product_code`,
- `product_name` as `product_name`,
- `product_version` as `product_version`,
- `product_status` as `product_status`,
- `user_code` as `user_code`,
- `user_name` as `user_name`,
- `user_age` as `user_age`,
- `user_address` as `user_address`,
- `class_code` as `class_code`,
- `class_name` as `class_name`,
- `class_address` as `class_address`,
- `product_count` as `product_count`,
- `price` as `price`,
- `discount` as `discount`,
- `order_status` as `order_status`,
- `order_create_time` as `order_create_time`,
- `order_update_time` as `order_update_time`,
- price - discount as `actual`,
- `year` as `year`,
- `month` as `month`,
- `day` as `day`
-from `ods`.`t_order`
-where `year` = '${YEAR}'
- and `month` = '${MONTH}'
- and `day` = '${DAY}'
-and product_version='v1';
-
--- step=2
--- source=temp
--- tableName=t_order__extracted
--- target=temp
--- tableName=t_order__grouped_dim
--- writeMode=overwrite
-select `t_order`.`class_code` as `dim_t_dim_class____class_code`,
- `t_order`.`class_name` as `dim_t_dim_class____class_name`,
- `t_order`.`class_address` as `dim_t_dim_class____class_address`,
- `t_order`.`product_code` as `dim_t_dim_product____mid`,
- `t_order`.`product_name` as `dim_t_dim_product____name`,
- `t_order`.`product_version` as `dim_t_dim_product____product_version`,
- `t_order`.`product_status` as `dim_t_dim_product____product_status`,
- case
- when (
- `t_dim_class`.`class_code` is null
- ) then 'new'
- when (
- `t_order`.`class_code` != `t_dim_class`.`class_code` or
- `t_order`.`class_name` != `t_dim_class`.`class_name` or
- `t_order`.`class_address` != `t_dim_class`.`class_address`
- ) then 'updated'
- else 'nochange'
- end as `auto_created_t_dim_class_status`,
- case
- when (
- `t_dim_product`.`mid` is null
- ) then 'new'
- when (
- `t_order`.`product_code` != `t_dim_product`.`mid` or
- `t_order`.`product_name` != `t_dim_product`.`name` or
- `t_order`.`product_version` != `t_dim_product`.`product_version` or
- `t_order`.`product_status` != `t_dim_product`.`product_status`
- ) then 'updated'
- else 'nochange'
- end as `auto_created_t_dim_product_status`,
- `t_order`.`year` as `year`,
- `t_order`.`month` as `month`,
- `t_order`.`day` as `day`,
- `t_order`.`order_update_time` as `order_update_time`,
- `t_order`.`order_create_time` as `order_create_time`
-from t_order__extracted `t_order`
- left join `dim`.`t_dim_class` `t_dim_class` -- TODO: year/month/day
- on `t_order`.`class_code` = `t_dim_class`.`class_code`
- left join `dim`.`t_dim_product` `t_dim_product` -- TODO: year/month/day
- on `t_order`.`product_code` = `t_dim_product`.`mid`;
-
--- step=3
--- source=temp
--- tableName=t_order__grouped_dim
--- target=temp
--- tableName=t_order__selected_dim
--- writeMode=overwrite
-select `class_id`,`class_code`,`class_name`,`class_address`,`order_update_time`,`order_create_time`,`is_auto_created`,`year`,`month`,`day` from (
- select uuid() as `class_id`,
- `dim_t_dim_class____class_code` as `class_code`,
- `dim_t_dim_class____class_name` as `class_name`,
- `dim_t_dim_class____class_address` as `class_address`,
- '1' as `is_auto_created`,
- `year`,
- `month`,
- `day`,
- `order_update_time`,
- `order_create_time`,
- row_number() OVER (PARTITION BY `dim_t_dim_class____class_code` ORDER BY `order_update_time` DESC) as row_number
- from t_order__grouped_dim t_order__grouped_dim
- where (`dim_t_dim_class____class_code` is not null)
- and (`auto_created_t_dim_class_status` = 'new')
-) where row_number = 1;
-
--- step=4
--- source=hive
--- dbName=dim
--- tableName=t_dim_class
--- target=variables
-select concat('where (',
- ifEmpty(
- concat_ws(')\n or (', collect_set(concat_ws(' and ', concat('`year` = ', `year`), concat('`month` = ', `month`), concat('`day` = ', `day`)))),
- '1 = 1'),
- ')') as `DWD_UPDATED_PARTITION`
-from (
- select dwd.*
- from `dim`.`t_dim_class` dwd
- left join `t_order__selected_dim` incremental_data on dwd.class_code = incremental_data.class_code
- where incremental_data.class_code is not null
- and dwd.is_latest = 1
-);
-
--- step=5
--- source=hive
--- dbName=dim
--- tableName=t_dim_class
--- target=temp
--- tableName=t_dim_class__changed_partition_view
-select *
-from `dim`.`t_dim_class`
-${DWD_UPDATED_PARTITION};
-
--- step=6
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer
--- methodName=transform
--- createTimeField=order_create_time
--- dropUpdateTimeField=true
--- dwUpdateType=incremental
--- dwViewName=t_dim_class__changed_partition_view
--- odsViewName=t_order__selected_dim
--- primaryFields=class_code
--- surrogateField=class_id
--- timeFormat=yyyy-MM-dd HH:mm:ss
--- updateTimeField=order_update_time
--- transformerType=object
--- target=hive
--- dbName=dim
--- tableName=t_dim_class
--- writeMode=overwrite
-
--- step=7
--- source=temp
--- tableName=t_order__grouped_dim
--- target=temp
--- tableName=t_order__selected_dim
--- writeMode=overwrite
-select uuid() as `product_id`,
- `dim_t_dim_product____mid` as `mid`,
- `dim_t_dim_product____name` as `name`,
- `dim_t_dim_product____product_version` as `product_version`,
- `dim_t_dim_product____product_status` as `product_status`,
- '1' as `is_auto_created`,
- `year`,
- `month`,
- `day`,
- `order_update_time`,
- `order_create_time`
-from t_order__grouped_dim t_order__grouped_dim
-where (`dim_t_dim_product____mid` is not null)
- and (
- `auto_created_t_dim_product_status` = 'new'
- or `auto_created_t_dim_product_status` = 'updated');
-
--- step=8
--- source=hive
--- dbName=dim
--- tableName=t_dim_product
--- target=variables
-select concat('where (',
- ifEmpty(
- concat_ws(')\n or (', collect_set(concat_ws(' and ', concat('`year` = ', `year`), concat('`month` = ', `month`), concat('`day` = ', `day`)))),
- '1 = 1'),
- ')') as `DWD_UPDATED_PARTITION`
-from (
- select dwd.*
- from `dim`.`t_dim_product` dwd
- left join `t_order__selected_dim` incremental_data on dwd.mid = incremental_data.mid
- where incremental_data.mid is not null
- and dwd.is_latest = 1
-);
-
--- step=9
--- source=hive
--- dbName=dim
--- tableName=t_dim_product
--- target=temp
--- tableName=t_dim_product__changed_partition_view
-select *
-from `dim`.`t_dim_product`
-${DWD_UPDATED_PARTITION};
-
--- step=10
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer
--- methodName=transform
--- createTimeField=order_create_time
--- dropUpdateTimeField=true
--- dwUpdateType=incremental
--- dwViewName=t_dim_product__changed_partition_view
--- odsViewName=t_order__selected_dim
--- primaryFields=mid
--- surrogateField=product_id
--- timeFormat=yyyy-MM-dd HH:mm:ss
--- updateTimeField=order_update_time
--- transformerType=object
--- target=hive
--- dbName=dim
--- tableName=t_dim_product
--- writeMode=overwrite
-
--- step=11
--- source=temp
--- tableName=t_order__extracted
--- target=temp
--- tableName=t_order__joined
--- writeMode=append
-select
- `t_order__extracted`.*,
- case when `t_dim_class`.`class_id` is null then '-1'
- else `t_dim_class`.`class_id`
- end as `class_id`,
- case when `t_dim_product`.`product_id` is null then '-1'
- else `t_dim_product`.`product_id`
- end as `product_id`,
- case when `t_dim_user`.`dim_user_id` is null then '-1'
- else `t_dim_user`.`dim_user_id`
- end as `user_id`
-from `t_order__extracted`
-left join `dim`.`t_dim_class` `t_dim_class`
- on `t_order__extracted`.`class_code` = `t_dim_class`.`class_code`
- and `t_order__extracted`.`order_create_time` >= `t_dim_class`.`start_time`
- and (`t_order__extracted`.`order_create_time` < `t_dim_class`.`end_time`
- or `t_dim_class`.`end_time` is null)
-
-left join `dim`.`t_dim_product` `t_dim_product`
- on `t_order__extracted`.`product_code` = `t_dim_product`.`mid`
- and `t_order__extracted`.`order_create_time` >= `t_dim_product`.`start_time`
- and (`t_order__extracted`.`order_create_time` < `t_dim_product`.`end_time`
- or `t_dim_product`.`end_time` is null)
-
-left join `dim`.`t_dim_user` `t_dim_user`
- on `t_order__extracted`.`user_code` = `t_dim_user`.`user_info_code`
- and `t_order__extracted`.`order_create_time` >= `t_dim_user`.`start_time`
- and (`t_order__extracted`.`order_create_time` < `t_dim_user`.`end_time`
- or `t_dim_user`.`end_time` is null);
-
--- step=12
--- source=temp
--- tableName=t_order__joined
--- target=temp
--- tableName=t_order__target_selected
--- writeMode=overwrite
-select
- `order_id`,
- `order_sn`,
- `product_id`,
- `user_id`,
- `class_id`,
- `product_count`,
- `price`,
- `discount`,
- `order_status`,
- `order_create_time`,
- `order_update_time`,
- `actual`,
- `year`,
- `month`,
- `day`
-from `t_order__joined`;
-
--- step=13
--- source=hive
--- dbName=dwd
--- tableName=t_fact_order
--- target=variables
-select concat('where (',
- ifEmpty(
- concat_ws(')\n or (', collect_set(concat_ws(' and ', concat('`year` = ', `year`), concat('`month` = ', `month`), concat('`day` = ', `day`)))),
- '1 = 1'),
- ')') as `DWD_UPDATED_PARTITION`
-from (
- select
- dwd.*
- from `dwd`.`t_fact_order` dwd
- left join `t_order__target_selected` incremental_data on dwd.order_id = incremental_data.order_id
- where incremental_data.order_id is not null
- and dwd.is_latest = 1
-);
-
--- step=14
--- source=hive
--- dbName=dwd
--- tableName=t_fact_order
--- target=temp
--- tableName=t_fact_order__changed_partition_view
-select *
-from `dwd`.`t_fact_order`
-${DWD_UPDATED_PARTITION};
-
--- step=15
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer
--- methodName=transform
--- createTimeField=order_create_time
--- dwUpdateType=incremental
--- dwViewName=t_fact_order__changed_partition_view
--- odsViewName=t_order__target_selected
--- partitionField=order_create_time
--- partitionFormat=year/month/day
--- primaryFields=order_id
--- surrogateField=
--- timeFormat=yyyy-MM-dd HH:mm:ss
--- updateTimeField=order_update_time
--- transformerType=object
--- target=hive
--- dbName=dwd
--- tableName=t_fact_order
--- writeMode=overwrite
diff --git a/data-modeling/src/test/resources/tasks/dim_student.sql b/data-modeling/src/test/resources/tasks/dim_student.sql
deleted file mode 100644
index 364e16f..0000000
--- a/data-modeling/src/test/resources/tasks/dim_student.sql
+++ /dev/null
@@ -1,98 +0,0 @@
--- workflow=dim_student
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=ods__t_student
-
--- step=1
--- source=hive
--- dbName=ods
--- tableName=t_student
--- target=temp
--- tableName=t_student__extracted
--- writeMode=overwrite
-select
- uuid() as `student_id`,
- `student_code` as `student_code`,
- `student_name` as `student_name`,
- `student_age` as `student_age`,
- `student_address` as `student_address`,
- `student_blabla` as `student_blabla`,
- `student_create_time` as `student_create_time`,
- `student_update_time` as `student_update_time`,
- `year` as `year`,
- `month` as `month`,
- `day` as `day`
-from `ods`.`t_student`
-where `year` = '${YEAR}'
- and `month` = '${MONTH}'
- and `day` = '${DAY}'
-and student_age<30;
-
--- step=2
--- source=temp
--- tableName=t_student__extracted
--- target=temp
--- tableName=t_student__target_selected
--- writeMode=overwrite
-select
- `student_id`,
- `student_code`,
- `student_name`,
- `student_age`,
- `student_address`,
- `student_create_time`,
- `student_update_time`,
- `year`,
- `month`,
- `day`,
- '0' as `is_auto_created`
-from `t_student__extracted`;
-
--- step=3
--- source=hive
--- dbName=dim
--- tableName=t_dim_student
--- target=variables
-select concat('where (',
- ifEmpty(
- concat_ws(')\n or (', collect_set(concat_ws(' and ', concat('`year` = ', `year`), concat('`month` = ', `month`), concat('`day` = ', `day`)))),
- '1 = 1'),
- ')') as `DWD_UPDATED_PARTITION`
-from (
- select
- dwd.*
- from `dim`.`t_dim_student` dwd
- left join `t_student__target_selected` incremental_data on dwd.student_code = incremental_data.student_code
- where incremental_data.student_code is not null
- and dwd.is_latest = 1
-);
-
--- step=4
--- source=hive
--- dbName=dim
--- tableName=t_dim_student
--- target=temp
--- tableName=t_dim_student__changed_partition_view
-select *
-from `dim`.`t_dim_student`
-${DWD_UPDATED_PARTITION};
-
--- step=5
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer
--- methodName=transform
--- createTimeField=student_create_time
--- dwUpdateType=incremental
--- dwViewName=t_dim_student__changed_partition_view
--- odsViewName=t_student__target_selected
--- partitionField=student_create_time
--- partitionFormat=year/month/day
--- primaryFields=student_code
--- surrogateField=student_id
--- timeFormat=yyyy-MM-dd HH:mm:ss
--- updateTimeField=student_update_time
--- transformerType=object
--- target=hive
--- dbName=dim
--- tableName=t_dim_student
--- writeMode=overwrite
diff --git a/data-modeling/src/test/resources/tasks/fact_device.sql b/data-modeling/src/test/resources/tasks/fact_device.sql
deleted file mode 100644
index 45c66ab..0000000
--- a/data-modeling/src/test/resources/tasks/fact_device.sql
+++ /dev/null
@@ -1,112 +0,0 @@
--- workflow=fact_device
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=ods__t_device
-
--- step=1
--- source=hive
--- dbName=ods
--- tableName=t_device
--- target=temp
--- tableName=t_device__extracted
--- writeMode=overwrite
-select
- `device_id` as `device_id`,
- `manufacturer` as `manufacturer`,
- `status` as `status`,
- `online` as `online`,
- `create_time` as `create_time`,
- `update_time` as `update_time`,
- `year` as `year`,
- `month` as `month`,
- `day` as `day`
-from `ods`.`t_device`
-where `year` = '${YEAR}'
- and `month` = '${MONTH}'
- and `day` = '${DAY}';
-
--- step=2
--- source=temp
--- tableName=t_device__extracted
--- target=temp
--- tableName=t_device__joined
--- writeMode=append
-select
- `t_device__extracted`.*,
- case when `t_dim_user`.`user_code` is null then '-1'
- else `t_dim_user`.`user_code`
- end as `user_id`
-from `t_device__extracted`
-left join `dim`.`t_dim_user` `t_dim_user`
- on `t_device__extracted`.`user_id` = `t_dim_user`.`user_code`
- and `t_device__extracted`.`create_time` >= `t_dim_user`.`start_time`
- and (`t_device__extracted`.`create_time` < `t_dim_user`.`end_time`
- or `t_dim_user`.`end_time` is null);
-
--- step=3
--- source=temp
--- tableName=t_device__joined
--- target=temp
--- tableName=t_device__target_selected
--- writeMode=overwrite
-select
- `device_id`,
- `manufacturer`,
- `user_id`,
- `status`,
- `online`,
- `create_time`,
- `update_time`,
- `year`,
- `month`,
- `day`
-from `t_device__joined`;
-
--- step=4
--- source=hive
--- dbName=dwd
--- tableName=t_fact_device
--- target=variables
-select concat('where (',
- ifEmpty(
- concat_ws(')\n or (', collect_set(concat_ws(' and ', concat('`year` = ', `year`), concat('`month` = ', `month`), concat('`day` = ', `day`)))),
- '1 = 1'),
- ')') as `DWD_UPDATED_PARTITION`
-from (
- select
- dwd.*
- from `dwd`.`t_fact_device` dwd
- left join `t_device__target_selected` incremental_data on dwd.device_id = incremental_data.device_id
- where incremental_data.device_id is not null
- and dwd.is_latest = 1
-);
-
--- step=5
--- source=hive
--- dbName=dwd
--- tableName=t_fact_device
--- target=temp
--- tableName=t_fact_device__changed_partition_view
-select *
-from `dwd`.`t_fact_device`
-${DWD_UPDATED_PARTITION};
-
--- step=6
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer
--- methodName=transform
--- createTimeField=create_time
--- dwUpdateType=incremental
--- dwViewName=t_fact_device__changed_partition_view
--- odsViewName=t_device__target_selected
--- partitionField=create_time
--- partitionFormat=year/month/day
--- primaryFields=device_id
--- surrogateField=
--- timeFormat=yyyy-MM-dd HH:mm:ss
--- updateTimeField=update_time
--- transformerType=object
--- target=hive
--- dbName=dwd
--- tableName=t_fact_device
--- writeMode=overwrite
\ No newline at end of file
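
Steps 4 and 5 of the workflow above implement partition pruning for the rewrite: step 4 renders the set of (year, month, day) partitions touched by the increment into a WHERE clause stored in the DWD_UPDATED_PARTITION variable, and step 5 reads back only those partitions. A plain-Scala sketch of the string being built (partition values are rendered unquoted, matching the concat calls in step 4):

    def partitionPredicate(parts: Seq[(String, String, String)]): String =
      if (parts.isEmpty) "where (1 = 1)"
      else parts
        .map { case (y, m, d) => s"`year` = $y and `month` = $m and `day` = $d" }
        .mkString("where (", ")\n or (", ")")

For example, partitionPredicate(Seq(("2024", "1", "5"))) yields "where (`year` = 2024 and `month` = 1 and `day` = 5)", while an empty input degrades to the always-true "where (1 = 1)", exactly as the ifEmpty fallback does.
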
diff --git a/data-modeling/src/test/resources/tasks/fact_event.sql b/data-modeling/src/test/resources/tasks/fact_event.sql
deleted file mode 100644
index 7096b00..0000000
--- a/data-modeling/src/test/resources/tasks/fact_event.sql
+++ /dev/null
@@ -1,213 +0,0 @@
--- workflow=fact_event
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=ods__t_event
-
--- step=1
--- source=hive
--- dbName=ods
--- tableName=t_event
--- target=temp
--- tableName=t_event__extracted
--- writeMode=overwrite
-select
- `event_id` as `event_id`,
- `device_IMEI` as `device_IMEI`,
- `device_model` as `device_model`,
- `device_version` as `device_version`,
- `device_language` as `device_language`,
- `event_status` as `event_status`,
- `create_time` as `create_time`,
- `update_time` as `update_time`,
- `year` as `year`,
- `month` as `month`,
- `day` as `day`
-from `ods`.`t_event`
-where `year` = '${YEAR}'
- and `month` = '${MONTH}'
- and `day` = '${DAY}';
-
--- step=2
--- source=temp
--- tableName=t_event__extracted
--- target=temp
--- tableName=t_event__grouped_dim
--- writeMode=overwrite
-select `t_event`.`device_IMEI` as `dim_t_dim_device____device_imei`,
- `t_event`.`device_model` as `dim_t_dim_device____device_model`,
- `t_event`.`device_version` as `dim_t_dim_device____device_version`,
- `t_event`.`device_language` as `dim_t_dim_device____device_language`,
- case
- when (
- `t_dim_device`.`device_imei` is null
- ) then 'new'
- when (
- `t_event`.`device_IMEI` != `t_dim_device`.`device_imei` or
- `t_event`.`device_model` != `t_dim_device`.`device_model` or
- `t_event`.`device_version` != `t_dim_device`.`device_version` or
- `t_event`.`device_language` != `t_dim_device`.`device_language`
- ) then 'updated'
- else 'nochange'
- end as `auto_created_t_dim_device_status`,
- `t_event`.`year` as `year`,
- `t_event`.`month` as `month`,
- `t_event`.`day` as `day`,
- `t_event`.`update_time` as `update_time`,
- `t_event`.`create_time` as `create_time`
-from t_event__extracted `t_event`
- left join `dim`.`t_dim_device` `t_dim_device` -- TODO: year/month/day
- on `t_event`.`device_IMEI` = `t_dim_device`.`device_imei`;
-
--- step=3
--- source=temp
--- tableName=t_event__grouped_dim
--- target=temp
--- tableName=t_event__selected_dim
--- writeMode=overwrite
-select uuid() as `device_id`,
- `dim_t_dim_device____device_imei` as `device_imei`,
- `dim_t_dim_device____device_model` as `device_model`,
- `dim_t_dim_device____device_version` as `device_version`,
- `dim_t_dim_device____device_language` as `device_language`,
- '1' as `is_auto_created`,
- `year`,
- `month`,
- `day`,
- `update_time`,
- `create_time`
-from t_event__grouped_dim t_event__grouped_dim
-where (`dim_t_dim_device____device_imei` is not null)
- and (
- `auto_created_t_dim_device_status` = 'new'
- or `auto_created_t_dim_device_status` = 'updated');
-
--- step=4
--- source=hive
--- dbName=dim
--- tableName=t_dim_device
--- target=variables
-select concat('where (',
- ifEmpty(
- concat_ws(')\n or (', collect_set(concat_ws(' and ', concat('`year` = ', `year`), concat('`month` = ', `month`), concat('`day` = ', `day`)))),
- '1 = 1'),
- ')') as `DWD_UPDATED_PARTITION`
-from (
- select dwd.*
- from `dim`.`t_dim_device` dwd
- left join `t_event__selected_dim` incremental_data on dwd.device_imei = incremental_data.device_imei
- where incremental_data.device_imei is not null
- and dwd.is_latest = 1
-);
-
--- step=5
--- source=hive
--- dbName=dim
--- tableName=t_dim_device
--- target=temp
--- tableName=t_dim_device__changed_partition_view
-select *
-from `dim`.`t_dim_device`
-${DWD_UPDATED_PARTITION};
-
--- step=6
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer
--- methodName=transform
--- createTimeField=create_time
--- dropUpdateTimeField=true
--- dwUpdateType=incremental
--- dwViewName=t_dim_device__changed_partition_view
--- odsViewName=t_event__selected_dim
--- primaryFields=device_imei
--- surrogateField=device_id
--- timeFormat=yyyy-MM-dd HH:mm:ss
--- updateTimeField=update_time
--- transformerType=object
--- target=hive
--- dbName=dim
--- tableName=t_dim_device
--- writeMode=overwrite
-
--- step=7
--- source=temp
--- tableName=t_event__extracted
--- target=temp
--- tableName=t_event__joined
--- writeMode=append
-select
- `t_event__extracted`.*,
- case when `t_dim_device`.`device_id` is null then '-1'
- else `t_dim_device`.`device_id`
- end as `device_id`
-from `t_event__extracted`
-left join `dim`.`t_dim_device` `t_dim_device`
- on `t_event__extracted`.`device_IMEI` = `t_dim_device`.`device_imei`
- and `t_event__extracted`.`create_time` >= `t_dim_device`.`start_time`
- and (`t_event__extracted`.`create_time` < `t_dim_device`.`end_time`
- or `t_dim_device`.`end_time` is null);
-
--- step=8
--- source=temp
--- tableName=t_event__joined
--- target=temp
--- tableName=t_event__target_selected
--- writeMode=overwrite
-select
- `event_id`,
- `device_id`,
- `event_status`,
- `create_time`,
- `update_time`,
- `year`,
- `month`,
- `day`
-from `t_event__joined`;
-
--- step=9
--- source=hive
--- dbName=dwd
--- tableName=t_fact_event
--- target=variables
-select concat('where (',
- ifEmpty(
- concat_ws(')\n or (', collect_set(concat_ws(' and ', concat('`year` = ', `year`), concat('`month` = ', `month`), concat('`day` = ', `day`)))),
- '1 = 1'),
- ')') as `DWD_UPDATED_PARTITION`
-from (
- select
- dwd.*
- from `dwd`.`t_fact_event` dwd
- left join `t_event__target_selected` incremental_data on dwd.event_id = incremental_data.event_id
- where incremental_data.event_id is not null
- and '1' = '1'
-);
-
--- step=10
--- source=hive
--- dbName=dwd
--- tableName=t_fact_event
--- target=temp
--- tableName=t_fact_event__changed_partition_view
-select *
-from `dwd`.`t_fact_event`
-${DWD_UPDATED_PARTITION};
-
--- step=11
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.NonSCDTransformer
--- methodName=transform
--- createTimeField=create_time
--- dwUpdateType=incremental
--- dwViewName=t_fact_event__changed_partition_view
--- odsViewName=t_event__target_selected
--- partitionField=create_time
--- partitionFormat=year/month/day
--- primaryFields=event_id
--- surrogateField=
--- timeFormat=yyyy-MM-dd HH:mm:ss
--- updateTimeField=update_time
--- transformerType=object
--- target=hive
--- dbName=dwd
--- tableName=t_fact_event
--- writeMode=overwrite
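
Step 2 of fact_event classifies every incoming event's device attributes against the current dimension row: a missing row means new, any attribute difference means updated, otherwise nochange; only new and updated rows go on to receive a uuid() surrogate and is_auto_created = '1' in step 3. The same decision in a few lines of Scala (the case class and field names are illustrative, not the project's):

    final case class Device(imei: String, model: String, version: String, language: String)

    def dimStatus(existing: Option[Device], incoming: Device): String = existing match {
      case None                     => "new"
      case Some(e) if e != incoming => "updated"
      case _                        => "nochange"
    }
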
diff --git a/data-modeling/src/test/resources/tasks/ods-template.sql b/data-modeling/src/test/resources/tasks/ods-template.sql
deleted file mode 100644
index be7a4dd..0000000
--- a/data-modeling/src/test/resources/tasks/ods-template.sql
+++ /dev/null
@@ -1,35 +0,0 @@
--- workflow=ods-template
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.order
--- target=hive
--- dbName=ods
--- tableName=t_order
--- writeMode=append
-SELECT "order_sn" AS "order_sn",
- "product_code" AS "product_code",
- "product_name" AS "product_name",
- "product_version" AS "product_version",
- "product_status" AS "product_status",
- "user_code" AS "user_code",
- "user_name" AS "user_name",
- "user_age" AS "user_age",
- "user_address" AS "user_address",
- "product_count" AS "product_count",
- "price" AS "price",
- "discount" AS "discount",
- "order_status" AS "order_status",
- "order_create_time" AS "order_create_time",
- "order_update_time" AS "order_update_time",
- '${JOB_ID}' AS "job_id",
- to_char("order_update_time", 'yyyy') as "year",
- to_char("order_update_time", 'MM') as "month",
- to_char("order_update_time", 'DD') as "day"
-FROM "postgres"."sales"."order"
-WHERE "order_update_time" >= '${DATA_RANGE_START}' AND "order_update_time" < '${DATA_RANGE_END}'
-  AND product_count = 1;
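
This template is timewindow-driven with period=1440, i.e. one run per day: the scheduler substitutes DATA_RANGE_START and DATA_RANGE_END so that each run extracts exactly one day's updates, and the year/month/day partition columns are derived from order_update_time. A sketch of how such a daily window could be computed (the real scheduler's variable semantics may differ):

    import java.time.{LocalDate, LocalDateTime}

    // assumed semantics: the run on day D covers [D-1 00:00, D 00:00)
    def dailyWindow(runDate: LocalDate): (LocalDateTime, LocalDateTime) =
      (runDate.minusDays(1).atStartOfDay(), runDate.atStartOfDay())
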
diff --git a/data-modeling/src/test/resources/tasks/ods-template2.sql b/data-modeling/src/test/resources/tasks/ods-template2.sql
deleted file mode 100644
index b3ef550..0000000
--- a/data-modeling/src/test/resources/tasks/ods-template2.sql
+++ /dev/null
@@ -1,26 +0,0 @@
--- workflow=ods-template2
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.product
--- target=hive
--- dbName=ods
--- tableName=t_product
--- writeMode=append
-SELECT "mid" AS "product_code",
- "name" AS "product_name",
- "version" AS "product_version",
- "status" AS "product_status",
- "create_time" AS "create_time",
- "update_time" AS "update_time",
- '${JOB_ID}' AS "job_id",
- to_char("update_time", 'yyyy') as "year",
- to_char("update_time", 'MM') as "month",
- to_char("update_time", 'DD') as "day"
-FROM "postgres"."sales"."product"
-WHERE "update_time" >= '${DATA_RANGE_START}' AND "update_time" < '${DATA_RANGE_END}'
-  AND version = 1 AND (name = 'sss' OR status = false);
diff --git a/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_full_no_sc.sql b/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_full_no_sc.sql
deleted file mode 100644
index 24f9b62..0000000
--- a/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_full_no_sc.sql
+++ /dev/null
@@ -1,115 +0,0 @@
--- workflow=ods_to_dwd_full_no_sc
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=source_to_ods
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=ods.t_order
--- target=temp
--- tableName=ods_t_order__extracted
--- writeMode=overwrite
-select
- "order_sn" as "order_sn",
- "product_code" as "product_code",
- "product_name" as "product_name",
- "product_version" as "product_version",
- "product_status" as "product_status",
- "user_code" as "user_code",
- "user_name" as "user_name",
- "user_age" as "user_age",
- "user_address" as "user_address",
- "product_count" as "product_count",
- "price" as "price",
- "discount" as "discount",
- "order_status" as "order_status",
- "order_create_time" as "order_create_time",
- "order_update_time" as "order_update_time",
- price - discount as "actual"
-from "postgres"."ods"."t_order"
-where "job_id" = '${DATA_RANGE_START}';
-
--- step=2
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_product
--- target=temp
--- tableName=postgres_dwd_t_dim_product__matched
--- writeMode=append
-select
- "id", "mid", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_product";
-
--- step=3
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_user
--- target=temp
--- tableName=postgres_dwd_t_dim_user__matched
--- writeMode=append
-select
- "user_id", "user_code", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_user";
-
--- step=4
--- source=temp
--- tableName=ods_t_order__extracted
--- target=temp
--- tableName=ods_t_order__joined
--- writeMode=append
-select
- `ods_t_order__extracted`.*,
- case when `postgres_dwd_t_dim_product__matched`.`id` is null then '-1'
- else `postgres_dwd_t_dim_product__matched`.`id` end as `product_id`,
- case when `postgres_dwd_t_dim_user__matched`.`user_id` is null then '-1'
- else `postgres_dwd_t_dim_user__matched`.`user_id` end as `user_id`
-from `ods_t_order__extracted`
- left join `postgres_dwd_t_dim_product__matched`
- on `ods_t_order__extracted`.`product_code` = `postgres_dwd_t_dim_product__matched`.`mid`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_product__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_product__matched`.`end_time`
- or `postgres_dwd_t_dim_product__matched`.`end_time` is null)
-
- left join `postgres_dwd_t_dim_user__matched`
- on `ods_t_order__extracted`.`user_code` = `postgres_dwd_t_dim_user__matched`.`user_code`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_user__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_user__matched`.`end_time`
- or `postgres_dwd_t_dim_user__matched`.`end_time` is null);
-
--- step=5
--- source=temp
--- tableName=ods_t_order__joined
--- target=temp
--- tableName=ods_t_order__target_selected
--- writeMode=overwrite
-select
- `order_sn`,
- `product_id`,
- `user_id`,
- `product_count`,
- `price`,
- `discount`,
- `order_status`,
- `order_create_time`,
- `order_update_time`,
- `actual`
-from `ods_t_order__joined`;
-
--- step=6
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer
--- methodName=transform
--- businessCreateTime=order_create_time
--- businessUpdateTime=order_update_time
--- currentDb=postgres
--- currentDbType=postgres
--- currentTable=dwd.t_fact_order
--- currentTableColumnsAndType={"order_status":"varchar(128)","actual":"decimal(10,4)","order_create_time":"timestamp","user_id":"varchar(128)","product_count":"int","price":"decimal(10,4)","product_id":"varchar(128)","discount":"decimal(10,4)","order_update_time":"timestamp","order_sn":"varchar(128)"}
--- primaryFields=order_sn
--- slowChanging=false
--- updateTable=ods_t_order__target_selected
--- updateType=full
--- transformerType=object
--- target=do_nothing
-
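
Step 6 hands the load off to JdbcLoadTransformer, with currentTableColumnsAndType carrying the target schema as a JSON object mapping column names to SQL types. A hypothetical helper showing how such a schema map can be rendered into staging-table DDL (ordered pairs are used because JSON object order is not guaranteed; this is not the transformer's actual behaviour):

    def createTableDdl(table: String, columns: Seq[(String, String)]): String =
      columns
        .map { case (name, tpe) => s""""$name" $tpe""" }
        .mkString(s"create table $table (", ", ", ")")

    // createTableDdl("dwd.t_fact_order", Seq("order_sn" -> "varchar(128)", "actual" -> "decimal(10,4)"))
    //   == """create table dwd.t_fact_order ("order_sn" varchar(128), "actual" decimal(10,4))"""
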
diff --git a/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_full_sc.sql b/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_full_sc.sql
deleted file mode 100644
index 40964ae..0000000
--- a/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_full_sc.sql
+++ /dev/null
@@ -1,116 +0,0 @@
--- workflow=ods_to_dwd_full_sc
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=source_to_ods
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=ods.t_order
--- target=temp
--- tableName=ods_t_order__extracted
--- writeMode=overwrite
-select
- uuid_generate_v1() as "onedata_order_id",
- "order_sn" as "order_sn",
- "product_code" as "product_code",
- "product_name" as "product_name",
- "product_version" as "product_version",
- "product_status" as "product_status",
- "user_code" as "user_code",
- "user_name" as "user_name",
- "user_age" as "user_age",
- "user_address" as "user_address",
- "product_count" as "product_count",
- "price" as "price",
- "discount" as "discount",
- "order_status" as "order_status",
- "order_create_time" as "order_create_time",
- "order_update_time" as "order_update_time",
- price - discount as "actual"
-from "postgres"."ods"."t_order"
-where "job_id" = '${DATA_RANGE_START}';
-
--- step=2
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_product
--- target=temp
--- tableName=postgres_dwd_t_dim_product__matched
--- writeMode=append
-select
- "id", "mid", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_product";
-
--- step=3
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_user
--- target=temp
--- tableName=postgres_dwd_t_dim_user__matched
--- writeMode=append
-select
- "user_id", "user_code", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_user";
-
--- step=4
--- source=temp
--- tableName=ods_t_order__extracted
--- target=temp
--- tableName=ods_t_order__joined
--- writeMode=append
-select
- `ods_t_order__extracted`.*,
- case when `postgres_dwd_t_dim_product__matched`.`id` is null then '-1'
- else `postgres_dwd_t_dim_product__matched`.`id` end as `product_id`,
- case when `postgres_dwd_t_dim_user__matched`.`user_id` is null then '-1'
- else `postgres_dwd_t_dim_user__matched`.`user_id` end as `user_id`
-from `ods_t_order__extracted`
-left join `postgres_dwd_t_dim_product__matched`
- on `ods_t_order__extracted`.`product_code` = `postgres_dwd_t_dim_product__matched`.`mid`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_product__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_product__matched`.`end_time`
- or `postgres_dwd_t_dim_product__matched`.`end_time` is null)
-
-left join `postgres_dwd_t_dim_user__matched`
- on `ods_t_order__extracted`.`user_code` = `postgres_dwd_t_dim_user__matched`.`user_code`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_user__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_user__matched`.`end_time`
- or `postgres_dwd_t_dim_user__matched`.`end_time` is null);
-
--- step=5
--- source=temp
--- tableName=ods_t_order__joined
--- target=temp
--- tableName=ods_t_order__target_selected
--- writeMode=overwrite
-select
- `order_sn`,
- `product_id`,
- `user_id`,
- `product_count`,
- `price`,
- `discount`,
- `order_status`,
- `order_create_time`,
- `order_update_time`,
- `actual`
-from `ods_t_order__joined`;
-
--- step=6
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer
--- methodName=transform
--- businessCreateTime=order_create_time
--- businessUpdateTime=order_update_time
--- currentDb=postgres
--- currentDbType=postgres
--- currentTable=dwd.t_fact_order
--- currentTableColumnsAndType={"order_status":"varchar(128)","actual":"decimal(10,4)","order_create_time":"timestamp","user_id":"varchar(128)","product_count":"int","price":"decimal(10,4)","product_id":"varchar(128)","discount":"decimal(10,4)","order_update_time":"timestamp","order_sn":"varchar(128)"}
--- primaryFields=order_sn
--- slowChanging=true
--- updateTable=ods_t_order__target_selected
--- updateType=full
--- transformerType=object
--- target=do_nothing
-
diff --git a/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_incremental_no_sc.sql b/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_incremental_no_sc.sql
deleted file mode 100644
index b8c890e..0000000
--- a/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_incremental_no_sc.sql
+++ /dev/null
@@ -1,115 +0,0 @@
--- workflow=ods_to_dwd_incremental_no_sc
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=source_to_ods
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=ods.t_order
--- target=temp
--- tableName=ods_t_order__extracted
--- writeMode=overwrite
-select
- "order_sn" as "order_sn",
- "product_code" as "product_code",
- "product_name" as "product_name",
- "product_version" as "product_version",
- "product_status" as "product_status",
- "user_code" as "user_code",
- "user_name" as "user_name",
- "user_age" as "user_age",
- "user_address" as "user_address",
- "product_count" as "product_count",
- "price" as "price",
- "discount" as "discount",
- "order_status" as "order_status",
- "order_create_time" as "order_create_time",
- "order_update_time" as "order_update_time",
- price - discount as "actual"
-from "postgres"."ods"."t_order"
-where "job_id" = '${DATA_RANGE_START}';
-
--- step=2
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_product
--- target=temp
--- tableName=postgres_dwd_t_dim_product__matched
--- writeMode=append
-select
- "id", "mid", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_product";
-
--- step=3
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_user
--- target=temp
--- tableName=postgres_dwd_t_dim_user__matched
--- writeMode=append
-select
- "user_id", "user_code", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_user";
-
--- step=4
--- source=temp
--- tableName=ods_t_order__extracted
--- target=temp
--- tableName=ods_t_order__joined
--- writeMode=append
-select
- `ods_t_order__extracted`.*,
- case when `postgres_dwd_t_dim_product__matched`.`id` is null then '-1'
- else `postgres_dwd_t_dim_product__matched`.`id` end as `product_id`,
- case when `postgres_dwd_t_dim_user__matched`.`user_id` is null then '-1'
- else `postgres_dwd_t_dim_user__matched`.`user_id` end as `user_id`
-from `ods_t_order__extracted`
-left join `postgres_dwd_t_dim_product__matched`
- on `ods_t_order__extracted`.`product_code` = `postgres_dwd_t_dim_product__matched`.`mid`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_product__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_product__matched`.`end_time`
- or `postgres_dwd_t_dim_product__matched`.`end_time` is null)
-
-left join `postgres_dwd_t_dim_user__matched`
- on `ods_t_order__extracted`.`user_code` = `postgres_dwd_t_dim_user__matched`.`user_code`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_user__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_user__matched`.`end_time`
- or `postgres_dwd_t_dim_user__matched`.`end_time` is null);
-
--- step=5
--- source=temp
--- tableName=ods_t_order__joined
--- target=temp
--- tableName=ods_t_order__target_selected
--- writeMode=overwrite
-select
- `order_sn`,
- `product_id`,
- `user_id`,
- `product_count`,
- `price`,
- `discount`,
- `order_status`,
- `order_create_time`,
- `order_update_time`,
- `actual`
-from `ods_t_order__joined`;
-
--- step=6
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer
--- methodName=transform
--- businessCreateTime=order_create_time
--- businessUpdateTime=order_update_time
--- currentDb=postgres
--- currentDbType=postgres
--- currentTable=dwd.t_fact_order
--- currentTableColumnsAndType={"order_status":"varchar(128)","actual":"decimal(10,4)","order_create_time":"timestamp","user_id":"varchar(128)","product_count":"int","price":"decimal(10,4)","product_id":"varchar(128)","discount":"decimal(10,4)","order_update_time":"timestamp","order_sn":"varchar(128)"}
--- primaryFields=order_sn
--- slowChanging=false
--- updateTable=ods_t_order__target_selected
--- updateType=incremental
--- transformerType=object
--- target=do_nothing
-
diff --git a/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_incremental_sc.sql b/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_incremental_sc.sql
deleted file mode 100644
index d8344d3..0000000
--- a/data-modeling/src/test/resources/tasks/postgres/ods_to_dwd_incremental_sc.sql
+++ /dev/null
@@ -1,117 +0,0 @@
--- workflow=ods_to_dwd_incremental_sc
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=source_to_ods
-
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=ods.t_order
--- target=temp
--- tableName=ods_t_order__extracted
--- writeMode=overwrite
-select
- uuid_generate_v1() as "onedata_order_id",
- "order_sn" as "order_sn",
- "product_code" as "product_code",
- "product_name" as "product_name",
- "product_version" as "product_version",
- "product_status" as "product_status",
- "user_code" as "user_code",
- "user_name" as "user_name",
- "user_age" as "user_age",
- "user_address" as "user_address",
- "product_count" as "product_count",
- "price" as "price",
- "discount" as "discount",
- "order_status" as "order_status",
- "order_create_time" as "order_create_time",
- "order_update_time" as "order_update_time",
- price - discount as "actual"
-from "postgres"."ods"."t_order"
-where "job_id" = '${DATA_RANGE_START}';
-
--- step=2
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_product
--- target=temp
--- tableName=postgres_dwd_t_dim_product__matched
--- writeMode=append
-select
- "id", "mid", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_product";
-
--- step=3
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_user
--- target=temp
--- tableName=postgres_dwd_t_dim_user__matched
--- writeMode=append
-select
- "user_id", "user_code", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_user";
-
--- step=4
--- source=temp
--- tableName=ods_t_order__extracted
--- target=temp
--- tableName=ods_t_order__joined
--- writeMode=append
-select
- `ods_t_order__extracted`.*,
- case when `postgres_dwd_t_dim_product__matched`.`id` is null then '-1'
- else `postgres_dwd_t_dim_product__matched`.`id` end as `product_id`,
- case when `postgres_dwd_t_dim_user__matched`.`user_id` is null then '-1'
- else `postgres_dwd_t_dim_user__matched`.`user_id` end as `user_id`
-from `ods_t_order__extracted`
-left join `postgres_dwd_t_dim_product__matched`
- on `ods_t_order__extracted`.`product_code` = `postgres_dwd_t_dim_product__matched`.`mid`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_product__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_product__matched`.`end_time`
- or `postgres_dwd_t_dim_product__matched`.`end_time` is null)
-
-left join `postgres_dwd_t_dim_user__matched`
- on `ods_t_order__extracted`.`user_code` = `postgres_dwd_t_dim_user__matched`.`user_code`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_user__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_user__matched`.`end_time`
- or `postgres_dwd_t_dim_user__matched`.`end_time` is null);
-
--- step=5
--- source=temp
--- tableName=ods_t_order__joined
--- target=temp
--- tableName=ods_t_order__target_selected
--- writeMode=overwrite
-select
- `order_sn`,
- `product_id`,
- `user_id`,
- `product_count`,
- `price`,
- `discount`,
- `order_status`,
- `order_create_time`,
- `order_update_time`,
- `actual`
-from `ods_t_order__joined`;
-
--- step=6
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer
--- methodName=transform
--- businessCreateTime=order_create_time
--- businessUpdateTime=order_update_time
--- currentDb=postgres
--- currentDbType=postgres
--- currentTable=dwd.t_fact_order
--- currentTableColumnsAndType={"order_status":"varchar(128)","actual":"decimal(10,4)","order_create_time":"timestamp","user_id":"varchar(128)","product_count":"int","price":"decimal(10,4)","product_id":"varchar(128)","discount":"decimal(10,4)","order_update_time":"timestamp","order_sn":"varchar(128)"}
--- primaryFields=order_sn
--- slowChanging=true
--- updateTable=ods_t_order__target_selected
--- updateType=incremental
--- transformerType=object
--- target=do_nothing
-
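
The four postgres workflows above form a deliberate 2x2 matrix over the load-step flags, with the slow-changing variants additionally minting onedata_order_id via uuid_generate_v1() in step 1; as a compact summary (flag names are the step-header keys):

    final case class LoadMode(slowChanging: Boolean, updateType: String)

    val covered = Seq(
      LoadMode(slowChanging = false, updateType = "full"),        // ods_to_dwd_full_no_sc
      LoadMode(slowChanging = true,  updateType = "full"),        // ods_to_dwd_full_sc
      LoadMode(slowChanging = false, updateType = "incremental"), // ods_to_dwd_incremental_no_sc
      LoadMode(slowChanging = true,  updateType = "incremental")  // ods_to_dwd_incremental_sc
    )
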
diff --git a/data-modeling/src/test/resources/tasks/postgres/source_to_ods.sql b/data-modeling/src/test/resources/tasks/postgres/source_to_ods.sql
deleted file mode 100644
index 728e579..0000000
--- a/data-modeling/src/test/resources/tasks/postgres/source_to_ods.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- workflow=source_to_ods
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.order
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_order
--- writeMode=append
-SELECT "order_sn" AS "order_sn",
- "product_code" AS "product_code",
- "product_name" AS "product_name",
- "product_version" AS "product_version",
- "product_status" AS "product_status",
- "user_code" AS "user_code",
- "user_name" AS "user_name",
- "user_age" AS "user_age",
- "user_address" AS "user_address",
- "product_count" AS "product_count",
- "price" AS "price",
- "discount" AS "discount",
- "order_status" AS "order_status",
- "order_create_time" AS "order_create_time",
- "order_update_time" AS "order_update_time",
- '${JOB_ID}' AS "job_id"
-FROM "postgres"."sales"."order"
-WHERE "order_update_time" >= '${DATA_RANGE_START}' AND "order_update_time" < '${DATA_RANGE_END}';
diff --git a/data-modeling/src/test/resources/tasks/t_use.sql b/data-modeling/src/test/resources/tasks/t_use.sql
deleted file mode 100644
index 8e6aa59..0000000
--- a/data-modeling/src/test/resources/tasks/t_use.sql
+++ /dev/null
@@ -1,25 +0,0 @@
--- workflow=t_use
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.user
--- target=hive
--- dbName=ods
--- tableName=t_user
--- writeMode=append
-SELECT "user_code" AS "user_code",
- "user_name" AS "user_name",
- "user_age" AS "user_age",
- "user_address" AS "user_address",
- "create_time" AS "create_time",
- "update_time" AS "update_time",
- '${JOB_ID}' AS "job_id",
- to_char("update_time", 'yyyy') as "year",
- to_char("update_time", 'MM') as "month",
- to_char("update_time", 'DD') as "day"
-FROM "postgres"."sales"."user"
-WHERE "update_time" >= '${DATA_RANGE_START}' AND "update_time" < '${DATA_RANGE_END}';
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/DwdTableParserSpec.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/DwdTableParserSpec.scala
deleted file mode 100644
index 5d8dba6..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/excel/parser/DwdTableParserSpec.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.excel.parser
-
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-class DwdTableParserSpec extends AnyFlatSpec with should.Matchers {
-
- it should "read excel and encapsulate as objects" in {
- val filePath = this
- .getClass
- .getClassLoader
- .getResource("data-dict-v2-hive.xlsx")
- .getPath
- val modelings = DwdTableParser.readDwdConfig(filePath)
- modelings.size should be(3)
-
- val modeling = modelings.head
-
- val dwdTableConfig = modeling.dwdTableConfig
- dwdTableConfig.sourceTable should be("t_order")
- dwdTableConfig.targetTable should be("t_fact_order")
- dwdTableConfig.factOrDim should be("fact")
-
- val columns = modeling.columns
-
- columns.size should be(26) // scalastyle:ignore
-
- val rowNumber3 = columns(2)
- rowNumber3.extraColumnExpression should be("zip_id_flag")
- rowNumber3.joinDb should be("dim")
- rowNumber3.joinTable should be("t_dim_product")
- rowNumber3.createDimMode should be("always")
- }
-}
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimGenHiveSpec.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimGenHiveSpec.scala
deleted file mode 100644
index 75f33e9..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/AutoCreateDimGenHiveSpec.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.DwdTableParser
-import DwdWorkflowGen.genWorkflow
-
-class AutoCreateDimGenHiveSpec extends SqlUUIDSpec {
-
- it should "parse data modeling to SQL" in {
- val excelFilePath = this
- .getClass
- .getClassLoader
- .getResource("data-dict-v2-hive.xlsx")
- .getPath
-
- val dwdModelings = DwdTableParser.readDwdConfig(excelFilePath)
-
- val order = dwdModelings.head
- genWorkflow(order, "auto_create_dim").toString.trim should be(readExpectConfig(s"tasks/auto_create_dim.sql").trim)
-
- }
-}
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpec.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpec.scala
deleted file mode 100644
index a77cb33..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpec.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.OdsTableParser
-
-class OdsWorkflowGenSpec extends SqlUUIDSpec {
- it should "parse source to ods => SQL" in {
- val excelFilePath = this
- .getClass
- .getClassLoader
- .getResource("ods-template.xlsx")
- .getPath
-
- val odsModelings = OdsTableParser.readOdsConfig(excelFilePath)
-
- val example = odsModelings.head
-
- val workflow = OdsWorkflowGen.genWorkflow(example, "ods-template")
- workflow.toString.trim should be(readExpectConfig(s"tasks/ods-template.sql").trim)
- }
-}
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpecMoreRowFilter.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpecMoreRowFilter.scala
deleted file mode 100644
index c3a612a..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpecMoreRowFilter.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.OdsTableParser
-
-class OdsWorkflowGenSpecMoreRowFilter extends SqlUUIDSpec {
- it should "parse source to ods with two row Filter Expression => SQL" in {
- val excelFilePath = this
- .getClass
- .getClassLoader
- .getResource("ods-template.xlsx")
- .getPath
- val odsModelings = OdsTableParser.readOdsConfig(excelFilePath)
- val example = odsModelings(2)
- val workflow = OdsWorkflowGen.genWorkflow(example, "ods-template2")
- workflow.toString.trim should be(readExpectConfig(s"tasks/ods-template2.sql").trim)
- }
-}
-
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpecNoRowFilter.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpecNoRowFilter.scala
deleted file mode 100644
index 65b51ef..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/OdsWorkflowGenSpecNoRowFilter.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.OdsTableParser
-
-class OdsWorkflowGenSpecNoRowFilter extends SqlUUIDSpec {
- it should "parse source to ods without row Filter Expression => SQL" in {
- val excelFilePath = this
- .getClass
- .getClassLoader
- .getResource("ods-template.xlsx")
- .getPath
- val odsModelings = OdsTableParser.readOdsConfig(excelFilePath)
- val use = odsModelings(1)
- val workflow = OdsWorkflowGen.genWorkflow(use, "t_use")
- workflow.toString.trim should be(readExpectConfig(s"tasks/t_use.sql").trim)
- }
-}
-
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/SqlUUIDSpec.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/SqlUUIDSpec.scala
deleted file mode 100644
index cc50504..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/SqlUUIDSpec.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.IOUtil
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-class SqlUUIDSpec extends AnyFlatSpec with should.Matchers {
- val uuidRegex = "[a-f\\d]{8,32}"
-
- def readExpectConfig(path: String): String = {
- IOUtil
- .readLinesFromResource(path)
- .map(_.replaceAll(uuidRegex, "uuid"))
- .mkString("\n")
- .trim
- }
-
-
- def toActualConfig(steps: List[WorkflowStep]): String = {
- steps
- .map(_.toString.replaceAll(uuidRegex, "uuid"))
- .mkString("\n")
- .trim
- }
-}
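
The uuidRegex normalisation is what keeps these golden-file comparisons stable: generated surrogate ids differ on every run, so both the expected fixture and the actual output have every hex id collapsed to the literal "uuid" before comparing. For instance (the generated id below is made up):

    val uuidRegex = "[a-f\\d]{8,32}"
    val actual    = "select '4f3c2a1b9e8d7c6f' as `student_id`"
    val expected  = "select 'uuid' as `student_id`"
    assert(actual.replaceAll(uuidRegex, "uuid") == expected)
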
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveFactSpec.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveFactSpec.scala
deleted file mode 100644
index c8a0666..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveFactSpec.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.DwdTableParser
-import DwdWorkflowGen.genWorkflow
-
-class WorkflowStepGenHiveFactSpec extends SqlUUIDSpec {
-
-
- it should "parse data modeling to SQL" in {
- val excelFilePath = this
- .getClass
- .getClassLoader
- .getResource("data-dict-v2-hive.xlsx")
- .getPath
-
- val dwdModelings = DwdTableParser.readDwdConfig(excelFilePath)
-
- val factDevice = dwdModelings(2)
-
- genWorkflow(factDevice, "fact_device")
- .toString.trim should be(readExpectConfig(s"tasks/fact_device.sql").trim)
-
- }
-}
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveNoneSCDSpec.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveNoneSCDSpec.scala
deleted file mode 100644
index 6f1fd1a..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveNoneSCDSpec.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.DwdTableParser
-import DwdWorkflowGen.genWorkflow
-
-class WorkflowStepGenHiveNoneSCDSpec extends SqlUUIDSpec {
-
-
- it should "parse data modeling to SQL" in {
- val excelFilePath = this
- .getClass
- .getClassLoader
- .getResource("data-dict-v2-event-hive.xlsx")
- .getPath
-
- val dwdModelings = DwdTableParser.readDwdConfig(excelFilePath)
-
- val eventModeling = dwdModelings.head
-
- genWorkflow(eventModeling, "fact_event")
- .toString.trim should be(readExpectConfig(s"tasks/fact_event.sql").trim)
-
- }
-}
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveSpec.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveSpec.scala
deleted file mode 100644
index d7792c0..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenHiveSpec.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.DwdTableParser
-import DwdWorkflowGen.genWorkflow
-
-class WorkflowStepGenHiveSpec extends SqlUUIDSpec {
-
- it should "parse data modeling to SQL" in {
- val excelFilePath = this
- .getClass
- .getClassLoader
- .getResource("data-dict-v2-hive.xlsx")
- .getPath
-
- val dwdModelings = DwdTableParser.readDwdConfig(excelFilePath)
-
- val dimStudent = dwdModelings(1)
- genWorkflow(dimStudent, "dim_student")
- .toString.trim should be(readExpectConfig(s"tasks/dim_student.sql").trim)
- }
-}
diff --git a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenPostgresSpec.scala b/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenPostgresSpec.scala
deleted file mode 100644
index a6a4ab4..0000000
--- a/data-modeling/src/test/scala/com/github/sharpdata/sharpetl/modeling/sql/gen/WorkflowStepGenPostgresSpec.scala
+++ /dev/null
@@ -1,198 +0,0 @@
-package com.github.sharpdata.sharpetl.modeling.sql.gen
-
-import com.github.sharpdata.sharpetl.modeling.excel.parser.DwdTableParser
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import AutoCreateDimSqlGen2.genAutoCreateDimStep
-import DwdExtractSqlGen.genExtractStep
-import DwdLoadSqlGen.genLoadStep
-import DwdTransformSqlGen.genTargetSelectStep
-import DwdTransformSqlGen2.{genMatchStep, generateReadMatchTableStep}
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-class WorkflowStepGenPostgresSpec extends AnyFlatSpec with should.Matchers {
- val uuidRegex = "[a-f\\d]{8,32}"
-
-
- it should "parse data modeling to SQL" in {
- val excelFilePath = this
- .getClass
- .getClassLoader
- .getResource("data-dict-v2-postgres.xlsx")
- .getPath
-
- val dwdModelings = DwdTableParser.readDwdConfig(excelFilePath)
-
- val factOrder = dwdModelings.head
-
- var steps = genExtractStep(factOrder, 1)
- toActualConfig(steps) should be(
- """-- step=1
- |-- source=postgres
- |-- dbName=ods
- |-- tableName=t_order
- |-- target=temp
- |-- tableName=t_order__extracted
- |-- writeMode=overwrite
- |select
- | "order_id" as "order_id",
- | "order_sn" as "order_sn",
- | "product_code" as "product_code",
- | "product_name" as "product_name",
- | "product_version" as "product_version",
- | "product_status" as "product_status",
- | "user_code" as "user_code",
- | "user_name" as "user_name",
- | "user_age" as "user_age",
- | "user_address" as "user_address",
- | "product_count" as "product_count",
- | "price" as "price",
- | "discount" as "discount",
- | "order_status" as "order_status",
- | "order_create_time" as "order_create_time",
- | "order_update_time" as "order_update_time",
- | price - discount as "actual"
- |from "ods"."t_order"
- |where "job_id" = '${DATA_RANGE_START}'
- |and product_code=1 or product_name='airport';""".stripMargin)
-
- steps = genAutoCreateDimStep(steps, factOrder, 2)._1
- val autoCreateDimSteps = steps.tail
- toActualConfig(autoCreateDimSteps) should be(
- """-- step=2
- |-- source=transformation
- |-- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcAutoCreateDimTransformer
- |-- methodName=transform
- |-- createDimMode=once
- |-- currentAndDimColumnsMapping={"order_create_time":"create_time","product_code":"mid","product_name":"name"}
- |-- currentAndDimPrimaryMapping={"product_code":"mid"}
- |-- currentBusinessCreateTime=order_create_time
- |-- dimDb=dim
- |-- dimDbType=postgres
- |-- dimTable=t_dim_product
- |-- dimTableColumnsAndType={"create_time":"timestamp","mid":"varchar(128)","name":"varchar(128)"}
- |-- updateTable=t_order__extracted
- |-- transformerType=object
- |-- target=do_nothing
- |
- |-- step=3
- |-- source=transformation
- |-- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcAutoCreateDimTransformer
- |-- methodName=transform
- |-- createDimMode=always
- |-- currentAndDimColumnsMapping={"order_create_time":"create_time","user_code":"code","user_name":"name","user_age":"age","user_address":"address"}
- |-- currentAndDimPrimaryMapping={"user_code":"code"}
- |-- currentBusinessCreateTime=order_create_time
- |-- dimDb=dim
- |-- dimDbType=postgres
- |-- dimTable=t_dim_user
- |-- dimTableColumnsAndType={"create_time":"timestamp","code":"varchar(128)","name":"varchar(128)","age":"int","address":"varchar(128)"}
- |-- updateTable=t_order__extracted
- |-- transformerType=object
- |-- target=do_nothing""".stripMargin)
-
- steps = generateReadMatchTableStep(steps, factOrder, 4)._1
- val readMatchTableSteps = steps.slice(3,5)
- toActualConfig(readMatchTableSteps) should be(
- """-- step=4
- |-- source=postgres
- |-- dbName=dim
- |-- tableName=t_dim_product
- |-- target=temp
- |-- tableName=dim_t_dim_product__matched
- |-- writeMode=append
- |select
- | "id", "mid", "start_time", "end_time"
- |from "dim"."t_dim_product";
- |
- |-- step=5
- |-- source=postgres
- |-- dbName=dim
- |-- tableName=t_dim_user
- |-- target=temp
- |-- tableName=dim_t_dim_user__matched
- |-- writeMode=append
- |select
- | "dim_user_id", "user_info_code", "start_time", "end_time"
- |from "dim"."t_dim_user";""".stripMargin)
-
- steps = genMatchStep(steps, factOrder, 6)._1
- val matchStep = steps.last
- toActualConfig(List(matchStep)) should be(
- """-- step=6
- |-- source=temp
- |-- tableName=t_order__extracted
- |-- target=temp
- |-- tableName=t_order__joined
- |-- writeMode=append
- |select
- | `t_order__extracted`.*,
- | case when `dim_t_dim_product__matched`.`id` is null then '-1'
- | else `dim_t_dim_product__matched`.`id` end as `product_id`,
- | case when `dim_t_dim_user__matched`.`dim_user_id` is null then '-1'
- | else `dim_t_dim_user__matched`.`dim_user_id` end as `user_id`
- |from `t_order__extracted`
- |left join `dim_t_dim_product__matched`
- | on `t_order__extracted`.`product_code` = `dim_t_dim_product__matched`.`mid`
- | and `t_order__extracted`.`order_create_time` >= `dim_t_dim_product__matched`.`start_time`
- | and (`t_order__extracted`.`order_create_time` < `dim_t_dim_product__matched`.`end_time`
- | or `dim_t_dim_product__matched`.`end_time` is null)
- |
- |left join `dim_t_dim_user__matched`
- | on `t_order__extracted`.`user_code` = `dim_t_dim_user__matched`.`user_info_code`
- | and `t_order__extracted`.`order_create_time` >= `dim_t_dim_user__matched`.`start_time`
- | and (`t_order__extracted`.`order_create_time` < `dim_t_dim_user__matched`.`end_time`
- | or `dim_t_dim_user__matched`.`end_time` is null);""".stripMargin)
-
- steps = genTargetSelectStep(steps, factOrder, 7)
- val targetStep = steps.last
- toActualConfig(List(targetStep)) should be(
- """-- step=7
- |-- source=temp
- |-- tableName=t_order__joined
- |-- target=temp
- |-- tableName=t_order__target_selected
- |-- writeMode=overwrite
- |select
- | `order_id`,
- | `order_sn`,
- | `product_id`,
- | `user_id`,
- | `product_count`,
- | `price`,
- | `discount`,
- | `order_status`,
- | `order_create_time`,
- | `order_update_time`,
- | `actual`
- |from `t_order__joined`;""".stripMargin)
-
- steps = genLoadStep(steps, factOrder, 8)
- val loadStep = steps.last
- toActualConfig(List(loadStep)) should be(
- """-- step=8
- |-- source=transformation
- |-- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer
- |-- methodName=transform
- |-- businessCreateTime=order_create_time
- |-- businessUpdateTime=order_update_time
- |-- currentDb=dwd
- |-- currentDbType=postgres
- |-- currentTable=t_fact_order
- |-- currentTableColumnsAndType={"order_id":"varchar(128)","order_sn":"varchar(128)","product_id":"varchar(128)","user_id":"varchar(128)","product_count":"int","price":"decimal(10,4)","discount":"decimal(10,4)","order_status":"varchar(128)","order_create_time":"timestamp","order_update_time":"timestamp","actual":"decimal(10,4)"}
- |-- primaryFields=order_id,product_id
- |-- slowChanging=false
- |-- updateTable=t_order__target_selected
- |-- updateType=full
- |-- transformerType=object
- |-- target=do_nothing""".stripMargin)
- }
-
-
- def toActualConfig(steps: List[WorkflowStep]): String = {
- steps
- .map(_.toString.replaceAll(uuidRegex, "uuid"))
- .mkString("\n")
- .trim
- }
-}
diff --git a/datasource/bigquery/build.gradle b/datasource/bigquery/build.gradle
deleted file mode 100644
index 08643f9..0000000
--- a/datasource/bigquery/build.gradle
+++ /dev/null
@@ -1,30 +0,0 @@
-plugins {
- id "java-library"
-}
-
-group 'com.github.sharpdata.sharpetl.datasource'
-version '0.2.0'
-
-
-dependencies {
- implementation(project(":core"))
- implementation "org.apache.spark:spark-sql_$scalaVersion:$sparkVersion"
- if (sparkVersion.startsWith("2.3")) {
- implementation "com.google.cloud.spark:spark-bigquery-with-dependencies_$scalaVersion:0.26.0"
- } else if (sparkVersion.startsWith("2.4")) {
- implementation "com.google.cloud.spark:spark-2.4-bigquery:0.26.0-preview"
- } else if (sparkVersion.startsWith("3.1") || sparkVersion.startsWith("3.2")) {
- implementation "com.google.cloud.spark:spark-3.1-bigquery:0.26.0-preview"
- } else if (sparkVersion.startsWith("3.0")) {
- implementation "com.google.cloud.spark:spark-bigquery-with-dependencies_$scalaVersion:0.26.0"
- }
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
-
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/datasource/bigquery/src/main/scala/com/github/sharpdata/sharpetl/datasource/bigquery/BigQueryDataSource.scala b/datasource/bigquery/src/main/scala/com/github/sharpdata/sharpetl/datasource/bigquery/BigQueryDataSource.scala
deleted file mode 100644
index bc93fc4..0000000
--- a/datasource/bigquery/src/main/scala/com/github/sharpdata/sharpetl/datasource/bigquery/BigQueryDataSource.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.bigquery
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Source
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.datasource.config.BigQueryDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-import scala.jdk.CollectionConverters._
-
-@source(types = Array("bigquery"))
-class BigQueryDataSource extends Source[DataFrame, SparkSession] {
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- val bigQueryDataSourceConfig = step.source.asInstanceOf[BigQueryDataSourceConfig]
- val bigQueryConfig: Map[String, String] = bigqueryProps(bigQueryDataSourceConfig.getSystem)
-
- executionContext
- .read
- .format("com.google.cloud.spark.bigquery")
- .options(bigQueryConfig)
- .load(step.sql)
- }
-
- def bigqueryProps(system: String): Map[String, String] = {
- val prefix = s"bigquery.$system."
- ETLConfig.plainProperties.filterKeys(_.startsWith(prefix))
- .map { case (k, v) => (k.replaceFirst(prefix, ""), v) }
- .toMap
- }
-}
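
bigqueryProps implements a per-system property namespace: every ETL property key of the form bigquery.<system>.<option> is narrowed and de-prefixed into the options map handed to the connector. A standalone equivalent (the property names in the usage comment are illustrative):

    def propsFor(system: String, all: Map[String, String]): Map[String, String] = {
      val prefix = s"bigquery.$system."
      all.collect { case (k, v) if k.startsWith(prefix) => k.stripPrefix(prefix) -> v }
    }

    // propsFor("gcpA", Map("bigquery.gcpA.parentProject" -> "p1", "es.nodes" -> "x"))
    //   == Map("parentProject" -> "p1")
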
diff --git a/datasource/elasticsearch/build.gradle b/datasource/elasticsearch/build.gradle
deleted file mode 100644
index f7c36f1..0000000
--- a/datasource/elasticsearch/build.gradle
+++ /dev/null
@@ -1,28 +0,0 @@
-plugins {
- id "java-library"
- id 'java'
-}
-
-group 'com.github.sharpdata.sharpetl.datasource'
-version '0.2.0'
-
-
-dependencies {
- implementation(project(":core"))
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- implementation "org.apache.spark:spark-sql_$scalaVersion:$sparkVersion"
- if (sparkVersion.startsWith("2.3")) {
- implementation group: "org.elasticsearch", name: "elasticsearch-spark-20_$scalaVersion", version: "7.7.0"
- } else if (sparkVersion.startsWith("2.4")) {
- implementation group: "org.elasticsearch", name: "elasticsearch-spark-20_$scalaVersion", version: "7.14.0"
- } else if (sparkVersion.startsWith("3") & scalaVersion.startsWith("2.12")) {
- implementation group: "org.elasticsearch", name: "elasticsearch-spark-30_$scalaVersion", version: "7.16.2"
- } else if (sparkVersion.startsWith("3") & scalaVersion.startsWith("2.13")) {
- implementation group: "org.elasticsearch", name: "elasticsearch-spark-30_$scalaVersion", version: "8.1.0"
- }
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/datasource/elasticsearch/src/main/scala/com/github/sharpdata/sharpetl/datasource/es/EsConfig.scala b/datasource/elasticsearch/src/main/scala/com/github/sharpdata/sharpetl/datasource/es/EsConfig.scala
deleted file mode 100644
index 38c4bf6..0000000
--- a/datasource/elasticsearch/src/main/scala/com/github/sharpdata/sharpetl/datasource/es/EsConfig.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.es
-
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-import org.apache.commons.lang3.StringUtils
-import org.elasticsearch.hadoop.cfg.ConfigurationOptions
-
-object EsConfig {
-
- def buildConfig(mappingId: String = ""): Map[String, String] = {
- if (StringUtils.isNotEmpty(mappingId)) {
-      // es.mapping.id must be set when using the update write mode
- defaultConfig += ConfigurationOptions.ES_MAPPING_ID -> mappingId
- }
- defaultConfig
- }
-
- private var defaultConfig: Map[String, String] = {
- Map(
- // hosts
- ConfigurationOptions.ES_NODES -> ETLConfig.getProperty("es.nodes"),
- // userName
- ConfigurationOptions.ES_NET_HTTP_AUTH_USER -> ETLConfig.getProperty("es.net.http.auth.user"),
- // password
- ConfigurationOptions.ES_NET_HTTP_AUTH_PASS -> ETLConfig.getProperty("es.net.http.auth.pass"),
-      // whether to trigger an index refresh after the batch write; only invoked once the whole write (i.e. several bulk updates) has completed, default true
-      ConfigurationOptions.ES_BATCH_WRITE_REFRESH -> ETLConfig.getProperty("es.batch.write.refresh"),
-      // whether to auto-create missing indices, default true
-      ConfigurationOptions.ES_INDEX_AUTO_CREATE -> ETLConfig.getProperty("es.index.auto.create"),
-      // default 1000
-      ConfigurationOptions.ES_BATCH_SIZE_ENTRIES -> ETLConfig.getProperty("es.batch.size.entries"),
-      // default 1mb
-      ConfigurationOptions.ES_BATCH_SIZE_BYTES -> ETLConfig.getProperty("es.batch.size.bytes"),
-      // defaults to index; five write operations: index, create, update, upsert, delete
-      ConfigurationOptions.ES_WRITE_OPERATION -> ETLConfig.getProperty("es.write.operation"),
-      // whether to write a field when its value is null, default false
-      ConfigurationOptions.ES_SPARK_DATAFRAME_WRITE_NULL_VALUES -> ETLConfig.getProperty("es.spark.dataframe.write.null"),
-      // default false
-      ConfigurationOptions.ES_NODES_WAN_ONLY -> ETLConfig.getProperty("es.nodes.wan.only")
- )
- }
-
-}
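
Two things are worth noting when calling this: a non-empty mappingId forces es.mapping.id, which the update/upsert write operations require in order to address documents; and because defaultConfig is a shared var that buildConfig reassigns, a mappingId set once carries over into later calls. A usage sketch:

    // per-step options override the property-driven defaults
    val cfg = EsConfig.buildConfig(mappingId = "order_sn") ++
      Map("es.write.operation" -> "upsert")
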
diff --git a/datasource/elasticsearch/src/main/scala/com/github/sharpdata/sharpetl/datasource/es/EsDataSource.scala b/datasource/elasticsearch/src/main/scala/com/github/sharpdata/sharpetl/datasource/es/EsDataSource.scala
deleted file mode 100644
index 7839859..0000000
--- a/datasource/elasticsearch/src/main/scala/com/github/sharpdata/sharpetl/datasource/es/EsDataSource.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.es
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Sink
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import org.apache.spark.sql.DataFrame
-import org.elasticsearch.spark.sql.EsSparkSQL
-
-@sink(types = Array("es"))
-class EsDataSource extends Sink[DataFrame] {
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- val targetConfig = step.target.asInstanceOf[DBDataSourceConfig]
- val esConfig = EsConfig.buildConfig(targetConfig.getPrimaryKeys).++(targetConfig.getOptions)
- EsSparkSQL.saveToEs(
- srdd = df,
- resource = targetConfig.getTableName,
- cfg = esConfig
- )
- }
-}
diff --git a/datasource/hive2/build.gradle b/datasource/hive2/build.gradle
deleted file mode 100644
index dd23568..0000000
--- a/datasource/hive2/build.gradle
+++ /dev/null
@@ -1,21 +0,0 @@
-plugins {
- id "java-library"
- id 'java'
-}
-
-group 'com.github.sharpdata.sharpetl.datasource'
-version '0.2.0'
-
-
-
-dependencies {
- implementation(project(":core"))
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- implementation "org.apache.spark:spark-sql_$scalaVersion:$sparkVersion"
- api "org.apache.spark:spark-hive_$scalaVersion:$sparkVersion"
-}
-
-test {
- useJUnitPlatform()
-}
diff --git a/datasource/hive2/src/main/scala/com/github/sharpdata/sharpetl/datasource/hive/HiveMetaStoreUtil.scala b/datasource/hive2/src/main/scala/com/github/sharpdata/sharpetl/datasource/hive/HiveMetaStoreUtil.scala
deleted file mode 100644
index c91bed3..0000000
--- a/datasource/hive2/src/main/scala/com/github/sharpdata/sharpetl/datasource/hive/HiveMetaStoreUtil.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.hive
-
-import org.apache.hadoop.hive.conf.HiveConf
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient
-import org.apache.hadoop.hive.metastore.api.FieldSchema
-import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
-import org.apache.spark.sql.types.{StructField, StructType}
-
-import scala.jdk.CollectionConverters._
-
-object HiveMetaStoreUtil {
- private var isClosed: Boolean = _
- private var hiveMetaStoreClient: HiveMetaStoreClient = _
-
- private def createHiveMetaStoreClient(): Unit = {
- try {
-      /**
-       * note: previously (Hive 1.2 and 2.1) the client constructor received an
-       * [[org.apache.hadoop.hive.conf.HiveConf]] object as argument
-       */
- this.hiveMetaStoreClient = new HiveMetaStoreClient(new HiveConf)
- isClosed = false
- } catch {
- case e: Exception =>
- e.printStackTrace()
- throw e
- }
- }
-
- def getHiveMetaStoreClient: HiveMetaStoreClient = {
- if (this.hiveMetaStoreClient == null) {
- createHiveMetaStoreClient()
- }
- if (isClosed) {
- this.hiveMetaStoreClient.reconnect()
- }
- this.hiveMetaStoreClient
- }
-
- def closeHiveMetaStoreClient(): Unit = {
- if (this.hiveMetaStoreClient != null && !isClosed) {
- this.hiveMetaStoreClient.close()
- isClosed = true
- }
- }
-
- def getHiveTableStructType(dbName: String, tableName: String): StructType = {
- fieldSchema2StructType(
- HiveMetaStoreUtil.getHiveTableAllCols(dbName, tableName)
- )
- }
-
- def fieldSchema2StructType(fieldSchemaArray: Array[FieldSchema]): StructType = {
- StructType(
- fieldSchemaArray.map(fieldSchema => StructField(
- fieldSchema.getName,
- CatalystSqlParser.parseDataType(fieldSchema.getType)
- ))
- )
- }
-
- def getHiveTablePartitionColNames(dbName: String, tableName: String): Array[String] = {
- getHiveTablePartitionCols(dbName, tableName).map(_.getName)
- }
-
- def getHiveTablePartitionCols(dbName: String, tableName: String): Array[FieldSchema] = {
- getHiveMetaStoreClient
- .getTable(dbName, tableName)
- .getPartitionKeys
- .asScala
- .toArray
- }
-
- def getHiveTableNonePartitionColNames(dbName: String, tableName: String): Array[String] = {
- getHiveTableNonePartitionCols(dbName, tableName).map(_.getName)
- }
-
- def getHiveTableNonePartitionCols(dbName: String, tableName: String): Array[FieldSchema] = {
- getHiveMetaStoreClient
- .getTable(dbName, tableName)
- .getSd
- .getCols
- .asScala
- .toArray
- }
-
- def getHiveTableAllColNames(dbName: String, tableName: String): Array[String] = {
- Array.concat(
- getHiveTableNonePartitionColNames(dbName, tableName),
- getHiveTablePartitionColNames(dbName, tableName)
- )
- }
-
- def getHiveTableAllCols(dbName: String, tableName: String): Array[FieldSchema] = {
- Array.concat(
- getHiveTableNonePartitionCols(dbName, tableName),
- getHiveTablePartitionCols(dbName, tableName)
- )
- }
-
-}
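
As a side note on the conversion above: `CatalystSqlParser.parseDataType` accepts Hive's textual type names directly, which is what lets `fieldSchema2StructType` translate metastore columns into a Spark schema. A minimal sketch with made-up columns:

```scala
// Self-contained illustration of the FieldSchema -> StructType mapping the
// deleted util performs; column names, types, and comments are invented.
import org.apache.hadoop.hive.metastore.api.FieldSchema
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.types.{StructField, StructType}

object SchemaSketch {
  def main(args: Array[String]): Unit = {
    val cols = Array(
      new FieldSchema("id", "bigint", "row id"),
      new FieldSchema("tags", "map<string,string>", "free-form tags")
    )
    val schema = StructType(cols.map(f =>
      StructField(f.getName, CatalystSqlParser.parseDataType(f.getType))))
    schema.printTreeString()
    // root
    //  |-- id: long (nullable = true)
    //  |-- tags: map (nullable = true) ...
  }
}
```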
diff --git a/datasource/hive3/build.gradle b/datasource/hive3/build.gradle
deleted file mode 100644
index 3ff3142..0000000
--- a/datasource/hive3/build.gradle
+++ /dev/null
@@ -1,22 +0,0 @@
-plugins {
- id "java-library"
- id 'java'
-}
-
-group 'com.github.sharpdata.sharpetl.datasource'
-version '0.2.0'
-
-
-
-dependencies {
- implementation(project(":core"))
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- implementation "org.apache.spark:spark-sql_$scalaVersion:$sparkVersion"
- api 'org.apache.hive:hive-common:3.1.2'
- api 'org.apache.hive:hive-metastore:3.1.2'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/datasource/hive3/src/main/scala/com/github/sharpdata/sharpetl/datasource/hive/HiveMetaStoreUtil.scala b/datasource/hive3/src/main/scala/com/github/sharpdata/sharpetl/datasource/hive/HiveMetaStoreUtil.scala
deleted file mode 100644
index 1d36ea4..0000000
--- a/datasource/hive3/src/main/scala/com/github/sharpdata/sharpetl/datasource/hive/HiveMetaStoreUtil.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.hive
-
-import org.apache.hadoop.hive.conf.HiveConf
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient
-import org.apache.hadoop.hive.metastore.api.FieldSchema
-import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
-import org.apache.spark.sql.types.{StructField, StructType}
-
-import scala.jdk.CollectionConverters._
-
-object HiveMetaStoreUtil {
- private var isClosed: Boolean = _
- private var hiveMetaStoreClient: HiveMetaStoreClient = _
-
- private def createHiveMetaStoreClient(): Unit = {
- try {
- /**
- * compatibility shim taken from upstream
- */
- try {
- /**
- * previously (Hive 1.2 and 2.1) the constructor received an [[org.apache.hadoop.hive.conf.HiveConf]] object
- */
- this.hiveMetaStoreClient = new HiveMetaStoreClient(new HiveConf)
- } catch {
- case _: NoSuchMethodError =>
-
- /**
- * (Hive 3.1) is [[org.apache.hadoop.conf.Configuration]]
- */
- this.hiveMetaStoreClient = new HiveMetaStoreClient(new org.apache.hadoop.conf.Configuration())
- }
- isClosed = false
- } catch {
- case e: Exception =>
- e.printStackTrace()
- throw e
- }
- }
-
- def getHiveMetaStoreClient: HiveMetaStoreClient = {
- if (this.hiveMetaStoreClient == null) {
- createHiveMetaStoreClient()
- }
- if (isClosed) {
- this.hiveMetaStoreClient.reconnect()
- }
- this.hiveMetaStoreClient
- }
-
- def closeHiveMetaStoreClient(): Unit = {
- if (this.hiveMetaStoreClient != null && !isClosed) {
- this.hiveMetaStoreClient.close()
- isClosed = true
- }
- }
-
- def getHiveTableStructType(dbName: String, tableName: String): StructType = {
- fieldSchema2StructType(
- HiveMetaStoreUtil.getHiveTableAllCols(dbName, tableName)
- )
- }
-
- def fieldSchema2StructType(fieldSchemaArray: Array[FieldSchema]): StructType = {
- StructType(
- fieldSchemaArray.map(fieldSchema => StructField(
- fieldSchema.getName,
- CatalystSqlParser.parseDataType(fieldSchema.getType)
- ))
- )
- }
-
- def getHiveTablePartitionColNames(dbName: String, tableName: String): Array[String] = {
- getHiveTablePartitionCols(dbName, tableName).map(_.getName)
- }
-
- def getHiveTablePartitionCols(dbName: String, tableName: String): Array[FieldSchema] = {
- getHiveMetaStoreClient
- .getTable(dbName, tableName)
- .getPartitionKeys
- .asScala
- .toArray
- }
-
- def getHiveTableNonePartitionColNames(dbName: String, tableName: String): Array[String] = {
- getHiveTableNonePartitionCols(dbName, tableName).map(_.getName)
- }
-
- def getHiveTableNonePartitionCols(dbName: String, tableName: String): Array[FieldSchema] = {
- getHiveMetaStoreClient
- .getTable(dbName, tableName)
- .getSd
- .getCols
- .asScala
- .toArray
- }
-
- def getHiveTableAllColNames(dbName: String, tableName: String): Array[String] = {
- Array.concat(
- getHiveTableNonePartitionColNames(dbName, tableName),
- getHiveTablePartitionColNames(dbName, tableName)
- )
- }
-
- def getHiveTableAllCols(dbName: String, tableName: String): Array[FieldSchema] = {
- Array.concat(
- getHiveTableNonePartitionCols(dbName, tableName),
- getHiveTablePartitionCols(dbName, tableName)
- )
- }
-
-}
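
One detail worth spelling out about the nested `try` above: `NoSuchMethodError` is a `LinkageError`, not an `Exception`, so the outer `catch { case e: Exception => ... }` would never see it; the inner handler is what makes the Hive 3.x fallback fire. A generalized sketch of the pattern, with the error raised artificially:

```scala
// Hedged sketch of the binary-compatibility fallback; the simulated error
// stands in for the Hive 1.x/2.x constructor being absent at runtime.
object CompatFallbackSketch {
  def withFallback[T](primary: => T)(fallback: => T): T =
    try primary
    catch { case _: NoSuchMethodError => fallback } // a LinkageError, caught explicitly

  def main(args: Array[String]): Unit = {
    val client = withFallback[String] {
      // simulate: HiveMetaStoreClient(HiveConf) missing on the classpath
      throw new NoSuchMethodError("HiveMetaStoreClient.<init>(HiveConf)")
    } {
      "client built with the Configuration-based constructor"
    }
    println(client)
  }
}
```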
diff --git a/datasource/kafka/build.gradle b/datasource/kafka/build.gradle
deleted file mode 100644
index a5a08dc..0000000
--- a/datasource/kafka/build.gradle
+++ /dev/null
@@ -1,28 +0,0 @@
-plugins {
- id "java-library"
- id "com.github.maiflai.scalatest"
- id "de.undercouch.download" version "4.1.1"
-}
-
-group 'com.github.sharpdata.sharpetl.datasource'
-version '0.2.0'
-
-
-dependencies {
- implementation(project(":core"))
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- implementation "org.apache.spark:spark-sql_$scalaVersion:$sparkVersion"
- implementation "org.apache.spark:spark-streaming-kafka-0-10_$scalaVersion:$sparkVersion"
- implementation 'com.google.code.gson:gson:2.8.9'
-
- testRuntimeOnly "com.vladsch.flexmark:flexmark-all:0.62.2"
- testImplementation group: "org.scalatest", name: "scalatest_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalatest", name: "scalatest-flatspec_$scalaVersion", version: "3.2.11"
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/BatchKafkaDataSource.scala b/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/BatchKafkaDataSource.scala
deleted file mode 100644
index bd70dce..0000000
--- a/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/BatchKafkaDataSource.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.kafka
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.datasource.config.BatchKafkaDataSourceConfig
-import com.github.sharpdata.sharpetl.core.datasource.config.KafkaDataFormat.{AVRO, JSON}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import KafkaConfig.{buildSparkKafkaConsumerConfig, buildSparkKafkaProducerConfig, schemaMapping}
-import OffsetRange.offsetEncoder
-import org.apache.spark.sql.functions.{col, from_json, lit, struct, to_json}
-import org.apache.spark.sql.types.StructType
-import org.apache.spark.sql.{DataFrame, SparkSession}
-import DFConversations._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-
-import java.util.UUID
-import scala.jdk.CollectionConverters._
-
-@source(types = Array("batch_kafka"))
-@sink(types = Array("batch_kafka"))
-class BatchKafkaDataSource extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- import executionContext.implicits._
- val kafkaDataSourceConfig = step.source.asInstanceOf[BatchKafkaDataSourceConfig]
-
- ETLLogger.info(
- s"""
- |Start processing data of topics ${kafkaDataSourceConfig.topics}
- |with startingOffsets ${jobLog.dataRangeStart} and
- |endingOffsets ${jobLog.dataRangeEnd}
- |""".stripMargin)
-
- val kafkaProps = buildSparkKafkaConsumerConfig(kafkaDataSourceConfig.groupId, kafkaDataSourceConfig.topics)
-
- val sourceSchema: StructType = StructType.fromDDL(kafkaDataSourceConfig.getSchemaDDL)
- val schemaMappingExpr = schemaMapping(sourceSchema)
-
- kafkaDataSourceConfig.format match {
- case JSON =>
- val originDf = executionContext
- .read
- .format("kafka")
- .option("startingOffsets", jobLog.dataRangeStart)
- .option("endingOffsets", jobLog.dataRangeEnd) //useful when re-run old job
- .options(kafkaProps)
- .load()
-
- if (originDf.isEmpty) {
- ETLLogger.warn(s"There are no new data need to be processed, set dataRangeEnd to ${jobLog.dataRangeStart}")
- jobLog.dataRangeEnd = jobLog.dataRangeStart
- } else if (jobLog.dataRangeEnd != "latest") {
- () // do nothing when re-run topic based job
- } else {
- val tempViewName = s"`${UUID.randomUUID().toString.split("-").head}`"
-
- originDf.createOrReplaceTempView(tempViewName)
-
- jobLog.dataRangeEnd =
- executionContext.sql(
- s"""
- |select `topic`,`partition`, max(offset) + 1 as maxOffset
- |from $tempViewName group by `topic`,`partition`""".stripMargin)
- .as[OffsetRange](offsetEncoder)
- .collectAsList()
- .asScala
- .toList
- .asEndJson
- ETLLogger.info(s"Newer data polled from kafka topic ${kafkaDataSourceConfig.topics}, update endingOffsets to ${jobLog.dataRangeEnd}")
- }
-
- val messageColumnNames = kafkaDataSourceConfig.topicMessageColumns match {
- case value: String if !isNullOrEmpty(value) => value.split(",").map(_.trim)
- case _ => Array.empty[String]
- }
- val exprColumns = "CAST(value as STRING)" +: messageColumnNames.map(it => s"CAST($it as STRING)")
- val selectColumns = (from_json($"value", sourceSchema) as "data") +: messageColumnNames.map(col)
- val allSchemaMappingExpr = schemaMappingExpr ++ messageColumnNames.toSeq
- originDf.selectExpr(exprColumns: _*).select(selectColumns: _*).selectExpr(allSchemaMappingExpr: _*)
-
- case AVRO => ???
- case _ => ???
- }
- }
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- val kafkaDataSourceConfig = step.target.asInstanceOf[BatchKafkaDataSourceConfig]
-
- {
- if (kafkaDataSourceConfig.enableSerDes.toLowerCase().toBoolean) {
- df.select(lit(""), to_json(struct("*")))
- } else {
- df.select(lit(""), struct("json"))
- }
- }.toDF("key", "value")
- .selectExpr("CAST(key AS STRING) as key", "CAST(value AS STRING) as value")
- .write
- .format("kafka")
- .options(buildSparkKafkaProducerConfig(kafkaDataSourceConfig.topics))
- .save()
- }
-}
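
The `dataRangeStart`/`dataRangeEnd` strings the job log carries are Spark's Kafka offset JSON: a map of topic to partition-to-offset. A small sketch producing that shape with made-up offsets:

```scala
// Offsets and topic name are illustrative; the printed string is the format
// accepted by .option("startingOffsets", ...) / .option("endingOffsets", ...).
import com.fasterxml.jackson.databind.ObjectMapper
import scala.jdk.CollectionConverters._

object OffsetJsonSketch {
  def main(args: Array[String]): Unit = {
    val starting = Map(
      "orders" -> Map("0" -> Long.box(42L), "1" -> Long.box(7L)).asJava
    ).asJava
    println(new ObjectMapper().writeValueAsString(starting))
    // {"orders":{"0":42,"1":7}}
  }
}
```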
diff --git a/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/DFConversations.scala b/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/DFConversations.scala
deleted file mode 100644
index f7c5c50..0000000
--- a/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/DFConversations.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.kafka
-
-import org.apache.spark.sql.DataFrame
-
-object DFConversations {
- implicit class DataFrameConversations(df: DataFrame) {
- def isEmpty: Boolean = {
- df.limit(1).count() == 0
- }
- }
-}
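
The point of `limit(1).count()` is that it can stop after finding a single row, where `df.count() == 0` would force a full scan. A tiny usage sketch with in-memory DataFrames; note that Spark 2.4+ defines `Dataset.isEmpty` natively, so the extension is invoked explicitly here to avoid ambiguity:

```scala
// Usage sketch of the extension above; data is in-memory and made up.
import org.apache.spark.sql.SparkSession
import com.github.sharpdata.sharpetl.datasource.kafka.DFConversations.DataFrameConversations

object IsEmptySketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("is-empty-sketch").getOrCreate()
    import spark.implicits._
    println(new DataFrameConversations(Seq.empty[Int].toDF("n")).isEmpty) // true
    println(new DataFrameConversations(Seq(1, 2, 3).toDF("n")).isEmpty)  // false
    spark.stop()
  }
}
```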
diff --git a/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/KafkaConfig.scala b/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/KafkaConfig.scala
deleted file mode 100644
index 321e8a0..0000000
--- a/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/KafkaConfig.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.kafka
-
-import com.github.sharpdata.sharpetl.core.exception.Exception.MissingConfigurationException
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, StringUtil}
-import org.apache.kafka.common.serialization.StringDeserializer
-import org.apache.spark.sql.types.StructType
-
-import scala.jdk.CollectionConverters._
-import java.util.Locale
-
-
-object KafkaConfig {
-
- private lazy val defaultStructuredStreamingConfig: Map[String, String] = {
- val props = kafkaProps("consumer")
- val mustHaveConf = Set("bootstrap.servers", "startingOffsets", "failOnDataLoss", "zookeeper.connect")
- if (!mustHaveConf.subsetOf(props.keySet)) {
- throw MissingConfigurationException(s"Kafka props are missing: ${mustHaveConf.map("kafka." + _).mkString(",")}" +
- s" must be present in the application-{env}.properties file.")
- }
- props
- }
-
- def buildStructuredStreamingConfig(topics: String): Map[String, String] = {
- Map(
- "subscribe" -> topics
- ) ++ defaultStructuredStreamingConfig
- }
-
- def buildNativeKafkaProducerConfig(groupId: String): Map[String, Object] = {
- Map(
- "key.deserializer" -> classOf[StringDeserializer],
- "value.deserializer" -> classOf[StringDeserializer],
- "group.id" -> groupId
- ) ++ nativeKafkaProps("producer")
- }
-
- def buildNativeKafkaConsumerConfig(groupId: String): Map[String, Object] = {
- Map(
- "key.deserializer" -> classOf[StringDeserializer],
- "value.deserializer" -> classOf[StringDeserializer],
- "group.id" -> groupId
- ) ++ nativeKafkaProps("consumer")
- }
-
- def buildSparkKafkaConsumerConfig(groupId: String, topics: String): Map[String, String] = {
- Map(
- "key.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer",
- "value.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer",
- "group.id" -> groupId,
- "subscribe" -> topics
- ) ++ kafkaProps("consumer")
- }
-
- def buildSparkKafkaProducerConfig(topics: String): Map[String, String] = {
- Map(
- "topic" -> topics
- ) ++ kafkaProps("producer")
- }
-
- def kafkaProps(prop: String): Map[String, String] = {
- val prefix = s"kafka.$prop."
- ETLConfig.plainProperties.filterKeys(_.startsWith(prefix))
- .map { case (k, v) => (k.replaceFirst(prefix, ""), v) }
- .toMap
- }
-
- // copied from org.apache.spark.sql.kafka010.KafkaSourceProvider.createSource
- def nativeKafkaProps(prop: String): Map[String, String] = {
- val parameters = kafkaProps(prop)
- parameters
- .keySet
- .filter(_.toLowerCase(Locale.ROOT).startsWith("kafka."))
- .map { k => k.drop(6) -> parameters(k) }
- .toMap
-
- }
-
- def schemaMapping: StructType => Seq[String] = {
- schema =>
- schema
- .fieldNames
- .map(fieldName => s"""data.$fieldName as ${StringUtil.humpToUnderline(fieldName)}""")
- }
-}
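
To make the two prefix conventions above concrete: `kafkaProps` strips `kafka.<role>.` from property keys, and `nativeKafkaProps` additionally drops the remaining `kafka.` prefix that native-client settings carry. A stand-alone sketch with a hard-coded properties map:

```scala
// The properties map is hard-coded here instead of coming from ETLConfig;
// keys mirror the style of the test fixture (kafka.producer.kafka.*).
object KafkaPropsSketch {
  def main(args: Array[String]): Unit = {
    val plainProperties = Map(
      "kafka.producer.kafka.bootstrap.servers" -> "broker:9093",
      "kafka.producer.kafka.security.protocol" -> "SASL_SSL",
      "etl.workflow.path"                      -> "tasks"
    )
    val prefix = "kafka.producer."
    val sparkProps = plainProperties
      .filter { case (k, _) => k.startsWith(prefix) }
      .map { case (k, v) => k.stripPrefix(prefix) -> v }
    println(sparkProps)
    // Map(kafka.bootstrap.servers -> broker:9093, kafka.security.protocol -> SASL_SSL)

    val nativeProps = sparkProps
      .filter { case (k, _) => k.startsWith("kafka.") }
      .map { case (k, v) => k.drop("kafka.".length) -> v }
    println(nativeProps)
    // Map(bootstrap.servers -> broker:9093, security.protocol -> SASL_SSL)
  }
}
```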
diff --git a/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/OffsetRange.scala b/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/OffsetRange.scala
deleted file mode 100644
index a256e5b..0000000
--- a/datasource/kafka/src/main/scala/com/github/sharpdata/sharpetl/datasource/kafka/OffsetRange.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.kafka
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import org.apache.spark.sql.{Encoder, Encoders}
-
-import scala.jdk.CollectionConverters._
-
-final case class OffsetRange(topic: String, partition: String, maxOffset: Long)
-
-object OffsetRange {
- val offsetEncoder: Encoder[OffsetRange] = Encoders.product[OffsetRange]
-
- implicit class OffsetRangeConverter(values: List[OffsetRange]) {
- def asEndJson: String = {
- val offsetRangeObj = values.groupBy(_.topic)
- .map { case (topic, offsetRanges) =>
- (topic, offsetRanges.map(it => (it.partition, it.maxOffset)).toMap.asJava)
- }.asJava
-
- new ObjectMapper().writeValueAsString(offsetRangeObj)
- }
- }
-}
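
A usage sketch for the converter, with invented partitions and offsets; the output is the `endingOffsets` JSON that `BatchKafkaDataSource` writes back into the job log:

```scala
import com.github.sharpdata.sharpetl.datasource.kafka.OffsetRange
import com.github.sharpdata.sharpetl.datasource.kafka.OffsetRange.OffsetRangeConverter

object AsEndJsonSketch {
  def main(args: Array[String]): Unit = {
    val ranges = List(
      OffsetRange("orders", "0", 23L),
      OffsetRange("orders", "1", 45L)
    )
    println(ranges.asEndJson) // {"orders":{"0":23,"1":45}}
  }
}
```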
diff --git a/datasource/kafka/src/test/resources/application.properties_encrypted b/datasource/kafka/src/test/resources/application.properties_encrypted
deleted file mode 100644
index 0bd408d..0000000
--- a/datasource/kafka/src/test/resources/application.properties_encrypted
+++ /dev/null
@@ -1,12 +0,0 @@
-etl.workflow.path=tasks
-
-encrypt.algorithm=PBEWithMD5AndDES
-encrypt.password=cGFzc3dvcmQ=
-
-some.password=ENC(XVfx2J3gunlX5v+/6YEwpU5/cgKsSr3LNFYheL5Eg5aYh13BnWWE6g==)
-
-kafka.producer.kafka.bootstrap.servers=localhost:9093
-kafka.producer.kafka.security.protocol=SASL_SSL
-kafka.producer.kafka.sasl.kerberos.service.name=kafka
-kafka.producer.kafka.ssl.truststore.location=/var/lib/cloudera-scm-agent/agent-cert/cm-auto-global_truststore.jks
-kafka.producer.kafka.ssl.truststore.password=ENC(XVfx2J3gunlX5v+/6YEwpU5/cgKsSr3LNFYheL5Eg5aYh13BnWWE6g==)
\ No newline at end of file
diff --git a/datasource/kafka/src/test/resources/etl.key b/datasource/kafka/src/test/resources/etl.key
deleted file mode 100644
index aefe06c..0000000
Binary files a/datasource/kafka/src/test/resources/etl.key and /dev/null differ
diff --git a/datasource/kafka/src/test/scala/com/github/sharpdata/sharpetl/datasource/kafka/ETLKafkaConfigSpec.scala b/datasource/kafka/src/test/scala/com/github/sharpdata/sharpetl/datasource/kafka/ETLKafkaConfigSpec.scala
deleted file mode 100644
index fba54f2..0000000
--- a/datasource/kafka/src/test/scala/com/github/sharpdata/sharpetl/datasource/kafka/ETLKafkaConfigSpec.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.kafka
-
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-import KafkaConfig.buildNativeKafkaProducerConfig
-import org.scalatest.BeforeAndAfterEach
-import org.scalatest.funspec.AnyFunSpec
-import org.scalatest.matchers.should
-
-class ETLKafkaConfigSpec extends AnyFunSpec with BeforeAndAfterEach with should.Matchers {
- it("should read plain text password from encrypted text") {
- val filePath = getClass.getResource("/application.properties_encrypted").toString
- ETLConfig.setPropertyPath(filePath)
-
- val props = buildNativeKafkaProducerConfig("fake-group-id")
- props("ssl.truststore.password") should be("plain text password: 1qaz@WSX")
- }
-
-
- override protected def beforeEach(): Unit = {
- reinitializeETLConfig()
- super.beforeEach()
- }
-
- // trick to reinitialize the object so that each test starts with fresh properties
- private def reinitializeETLConfig() = {
- val cons = ETLConfig.getClass.getDeclaredConstructor()
- cons.setAccessible(true)
- cons.newInstance()
- }
-}
diff --git a/datasource/kudu/build.gradle b/datasource/kudu/build.gradle
deleted file mode 100644
index 257660b..0000000
--- a/datasource/kudu/build.gradle
+++ /dev/null
@@ -1,44 +0,0 @@
-plugins {
- id "java-library"
- id 'java'
- id "com.github.maiflai.scalatest"
-}
-
-group 'com.github.sharpdata.sharpetl.datasource'
-version '0.2.0'
-
-
-dependencies {
- implementation(project(":core"))
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- implementation "org.apache.spark:spark-sql_$scalaVersion:$sparkVersion"
- if (scalaVersion.startsWith('2.11')) {
- implementation group: "org.apache.kudu", name: "kudu-spark2_$scalaVersion", version: "1.9.0"
- } else if (sparkVersion.startsWith("3") && scalaVersion.startsWith("2.12")) {
- implementation group: "org.apache.kudu", name: "kudu-spark3_$scalaVersion", version: "1.15.0"
- }
- testRuntimeOnly "com.vladsch.flexmark:flexmark-all:0.62.2"
- testImplementation group: "org.scalatest", name: "scalatest_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalatest", name: "scalatest-funspec_$scalaVersion", version: "3.2.11"
-}
-
-if ((scalaVersion.startsWith('2.12') && sparkVersion.startsWith("2.4")) ||
- (sparkVersion.startsWith("3") && scalaVersion.startsWith("2.13"))) {
- sourceSets {
- main {
- scala {
- srcDirs = ['src/main/spark_2.4_scala_212']
- }
- }
- test {
- scala {
- srcDirs = ['src/test/spark_2.4_scala_212']
- }
- }
- }
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/datasource/kudu/src/main/scala/com/github/sharpdata/sharpetl/datasource/kudu/KuduDataSource.scala b/datasource/kudu/src/main/scala/com/github/sharpdata/sharpetl/datasource/kudu/KuduDataSource.scala
deleted file mode 100644
index 598881b..0000000
--- a/datasource/kudu/src/main/scala/com/github/sharpdata/sharpetl/datasource/kudu/KuduDataSource.scala
+++ /dev/null
@@ -1,166 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.kudu
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.{DataSourceType, WriteMode}
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import org.apache.kudu.client.CreateTableOptions
-import org.apache.kudu.spark.kudu.{KuduContext, KuduWriteOptions}
-import org.apache.spark.sql.types.StructType
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-@source(types = Array("kudu", "impala_kudu"))
-@sink(types = Array("kudu", "impala_kudu"))
-class KuduDataSource(executionContext: SparkSession) extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
- private val KUDU = "org.apache.kudu.spark.kudu"
- private val KUDU_MASTER = "kudu.master"
- private val KUDU_TABLE = "kudu.table"
-
- private val kuduMaster = ETLConfig.getProperty(KUDU_MASTER)
-
- lazy val kuduContext = new KuduContext(
- kuduMaster,
- executionContext.sparkContext
- )
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- val sourceConfig = step.getSourceConfig[DBDataSourceConfig]
- val df = executionContext
- .read
- .options(buildKuduLoadOptions(sourceConfig))
- .format(KUDU)
- .load
- val selectSql = step.getSql
- if (selectSql != null) {
- val tableName = sourceConfig.getTableName
- df.createOrReplaceTempView(tableName)
- ETLLogger.info(s"""[step${step.getStep}] Select Sql: \n$selectSql""")
- executionContext.sql(selectSql)
- } else {
- df
- }
-
- }
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = ???
-
-
- def tableExists(tableName: String): Boolean = {
- kuduContext.tableExists(tableName)
- }
-
- def createTable(
- tableName: String,
- schema: StructType,
- keys: Seq[String],
- options: CreateTableOptions): Unit = {
- if (!tableExists(tableName)) {
- kuduContext.createTable(
- tableName,
- schema,
- keys,
- options
- )
- }
- }
-
- def deleteTable(tableName: String): Unit = {
- if (tableExists(tableName)) {
- kuduContext.deleteTable(tableName)
- }
- }
-
-
- def buildKuduLoadOptions(sourceConfig: DBDataSourceConfig): Map[String, String] = {
- val dataSourceType = sourceConfig.getDataSourceType
- val kuduTable = dataSourceType.toLowerCase match {
- case DataSourceType.IMPALA_KUDU =>
- s"${ETLConfig.getProperty("kudu.table.prefix")}${sourceConfig.getDbName}.${sourceConfig.getTableName}"
- case DataSourceType.KUDU =>
- sourceConfig.getTableName
- }
- Map(
- KUDU_MASTER -> kuduMaster,
- KUDU_TABLE -> kuduTable
- ).++(sourceConfig.getOptions)
- }
-
- def save(
- df: DataFrame,
- dbName: String,
- kuduTable: String,
- writeMode: String
- ): Unit = {
- save(df, dbName, kuduTable, writeMode, new KuduWriteOptions())
- }
-
- def save(
- df: DataFrame,
- dbName: String,
- kuduTable: String,
- writeMode: String,
- kuduWriteOptions: KuduWriteOptions
- ): Unit = {
- val impalaKuduTable = s"${ETLConfig.getProperty("kudu.table.prefix")}$dbName.$kuduTable"
- save(df, impalaKuduTable, writeMode, kuduWriteOptions)
- }
-
- def save(
- df: DataFrame,
- kuduTable: String,
- writeMode: String
- ): Unit = {
- save(df, kuduTable, writeMode, new KuduWriteOptions())
- }
-
- def save(
- df: DataFrame, kuduTable: String, writeMode: String,
- kuduWriteOptions: KuduWriteOptions): Unit = {
- writeMode.toLowerCase match {
- case WriteMode.APPEND =>
- save(df, kuduTable)
- case WriteMode.UPSERT =>
- upsert(df, kuduTable, kuduWriteOptions)
- case WriteMode.DELETE =>
- delete(df, kuduTable, kuduWriteOptions)
- }
- }
-
- def save(
- df: DataFrame,
- kuduTable: String,
- kuduWriteOptions: KuduWriteOptions = new KuduWriteOptions): Unit = {
- kuduContext.insertRows(
- df,
- kuduTable,
- kuduWriteOptions
- )
- }
-
- def upsert(
- df: DataFrame,
- kuduTable: String,
- kuduWriteOptions: KuduWriteOptions = new KuduWriteOptions): Unit = {
- kuduContext.upsertRows(
- df,
- kuduTable,
- kuduWriteOptions
- )
- }
-
- def delete(
- df: DataFrame,
- kuduTable: String,
- kuduWriteOptions: KuduWriteOptions = new KuduWriteOptions): Unit = {
- kuduContext.deleteRows(
- df,
- kuduTable,
- kuduWriteOptions
- )
- }
-
-}
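
Hypothetical call sites for the overloads above (table and database names invented); the three-string variant resolves the Impala table name through the `kudu.table.prefix` property:

```scala
import org.apache.spark.sql.{DataFrame, SparkSession}
import com.github.sharpdata.sharpetl.datasource.kudu.KuduDataSource

object KuduSaveSketch {
  // spark and df would come from a real job; shown as parameters here
  def run(spark: SparkSession, df: DataFrame): Unit = {
    val ds = new KuduDataSource(spark)
    ds.save(df, "impala::smc.orders", "upsert") // dispatches to kuduContext.upsertRows
    ds.save(df, "smc", "orders", "delete")      // prefix from kudu.table.prefix, e.g. impala::
  }
}
```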
diff --git a/datasource/kudu/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/datasource/kudu/KuduDataSource.scala b/datasource/kudu/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/datasource/kudu/KuduDataSource.scala
deleted file mode 100644
index 2c24d8b..0000000
--- a/datasource/kudu/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/datasource/kudu/KuduDataSource.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.kudu
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.annotation._
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-@source(types = Array("kudu", "impala_kudu"))
-@sink(types = Array("kudu", "impala_kudu"))
-class KuduDataSource extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = ???
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = ???
-}
diff --git a/datasource/kudu/src/test/resources/application.properties b/datasource/kudu/src/test/resources/application.properties
deleted file mode 100644
index 088a177..0000000
--- a/datasource/kudu/src/test/resources/application.properties
+++ /dev/null
@@ -1,19 +0,0 @@
-etl.workflow.path=tasks
-
-psi.postgres.driver=org.postgresql.Driver
-psi.postgres.fetchsize=10
-psi.postgres.url=jdbc:postgresql://localhost:5432/default?currentSchema="test"
-psi.postgres.user=root
-psi.postgres.password=root
-
-kudu.table.prefix=impala::
-
-flyway.url=jdbc:mysql://localhost:2333/sharp_etl
-flyway.username=admin
-flyway.password=admin
-
-int_test.mysql.url=jdbc:mysql://localhost:2334/int_test
-int_test.mysql.driver=com.mysql.cj.jdbc.Driver
-int_test.mysql.user=admin
-int_test.mysql.password=admin
-int_test.mysql.fetchsize=1000
\ No newline at end of file
diff --git a/datasource/kudu/src/test/scala/com/thoughtworks/datasource/kudu/KuduDataSourceTest.scala b/datasource/kudu/src/test/scala/com/thoughtworks/datasource/kudu/KuduDataSourceTest.scala
deleted file mode 100644
index 8615f14..0000000
--- a/datasource/kudu/src/test/scala/com/thoughtworks/datasource/kudu/KuduDataSourceTest.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package com.github.sharpdata.sharpetl.datasource.kudu
-
-import com.github.sharpdata.sharpetl.datasource.kudu.KuduDataSource
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import org.scalatest.PrivateMethodTester
-import org.scalatest.funspec.AnyFunSpec
-
-class KuduDataSourceTest extends AnyFunSpec with PrivateMethodTester {
- describe("buildKuduLoadOptions") {
- for ((sourceType, value) <- Map("kudu" -> "temp", "impala_kudu" -> "impala::smc.temp")) {
- it(s"should build options correctly for source type ${sourceType}") {
- val sourceConfig = new DBDataSourceConfig()
- sourceConfig.setDataSourceType(sourceType)
- sourceConfig.setDbName("smc")
- sourceConfig.setTableName("temp")
- val sourceOptions = Map("testArg" -> "build")
- sourceConfig.setOptions(sourceOptions)
- val options =
- new KuduDataSource(null).buildKuduLoadOptions(sourceConfig)
-
- assert(options.size == 3)
- assert(options("testArg") == "build")
- assert(options("kudu.table") == value)
- }
- }
- }
-}
diff --git a/docker/README.md b/docker/README.md
deleted file mode 100644
index 9d16380..0000000
--- a/docker/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-
-## local hive env setup
-
-```bash
-cd docker
-docker compose up -d
-docker compose exec hive /opt/hive/bin/hive # if you want to run hive cli
-```
\ No newline at end of file
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
deleted file mode 100644
index c26a43b..0000000
--- a/docker/docker-compose.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-version: '3'
-services:
- hive:
- image: sharpetl/hive3:3.1.2
- ports:
- - "9083:9083"
- - "10000:10000"
- - "10002:10002"
- environment:
- - CONNECTION_URL=jdbc:mysql://mysql:3306/hive?useSSL=false
- - CONNECTION_USER_NAME=root
- - CONNECTION_PASSWORD=root
- - WAREHOUSE_DIR=file://${HOME}/Documents/warehouse
- - WAIT_HOSTS=mysql:3306
- depends_on:
- - mysql
- volumes:
- - ${HOME}/Documents/warehouse:${HOME}/Documents/warehouse
- mysql:
- image: mysql:5.7.28
- ports:
- - "3306:3306"
- volumes:
- - ${PWD}/mysql:/docker-entrypoint-initdb.d
- environment:
- - MYSQL_ROOT_PASSWORD=root
diff --git a/docker/mysql/init_db.sql b/docker/mysql/init_db.sql
deleted file mode 100644
index d3d0a4e..0000000
--- a/docker/mysql/init_db.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-CREATE DATABASE sharp_etl;
-CREATE DATABASE hive;
\ No newline at end of file
diff --git a/flink/build.gradle b/flink/build.gradle
deleted file mode 100644
index 249eac7..0000000
--- a/flink/build.gradle
+++ /dev/null
@@ -1,345 +0,0 @@
-plugins {
- id "java-library"
- id "application"
- id "scala"
- id "com.github.alisiikh.scalastyle"
- id "com.github.johnrengelman.shadow" version "7.1.2"
- id "com.github.maiflai.scalatest"
- id "maven-publish"
-}
-
-import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
-
-group = 'com.github.sharpdata.sharpetl'
-sourceCompatibility = 1.8
-version = '0.2.0'
-
-
-publishing {
- publications {
- maven(MavenPublication) {
- artifactId = "sharp-etl-flink-${flinkVersion}_${scalaVersion}"
- afterEvaluate {
- //artifact(tasks.findByName("shadowJar"))
- artifact(tasks.findByName("sourceJar"))
- artifact(tasks.findByName("slimJar"))
- }
- }
- }
- repositories {
-// maven {
-// name 'maven-snapshots'
-// url "http://repo.maven.com/repository/maven-snapshots"
-// credentials {
-// username project.repoUser
-// password project.repoPassword
-// }
-// }
-
- maven {
- name = "GitHubPackages"
- url = uri("https://maven.pkg.github.com/SharpData/SharpETL")
- credentials {
- username = System.getenv("GITHUB_ACTOR")
- password = System.getenv("GITHUB_TOKEN")
- }
- }
- }
-}
-
-dependencies {
- implementation(project(":core"))
- implementation(project(":data-modeling"))
-
- if (scalaVersion == "2.12") {
- // --------------------------------------------------------------
- // Compile-time dependencies that should NOT be part of the
- // shadow (uber) jar and are provided in the lib folder of Flink
- // --------------------------------------------------------------
- implementation "org.apache.flink:flink-streaming-java:${flinkVersion}"
- implementation "org.apache.flink:flink-clients:${flinkVersion}"
- implementation "org.apache.flink:flink-connector-files:${flinkVersion}"
- implementation "org.apache.flink:flink-table-planner_${scalaVersion}:${flinkVersion}"
- implementation "org.apache.flink:flink-table-api-java:${flinkVersion}"
-
- // --------------------------------------------------------------
- // Dependencies that should be part of the shadow jar, e.g.
- // connectors. These must be in the flinkShadowJar configuration!
- // --------------------------------------------------------------
- //flinkShadowJar "org.apache.flink:flink-connector-kafka:${flinkVersion}"
- implementation "org.apache.flink:flink-connector-kafka:${flinkVersion}"
- implementation 'org.apache.paimon:paimon-flink-1.17:0.6.0-incubating'
- implementation 'org.apache.paimon:paimon-oss:0.6.0-incubating'
- implementation 'org.apache.flink:flink-connector-jdbc:3.1.1-1.17'
-
- implementation "io.circe:circe-yaml_$scalaVersion:0.15.0-RC1"
- implementation "io.circe:circe-generic_$scalaVersion:0.15.0-M1"
- implementation "io.circe:circe-generic-extras_$scalaVersion:0.14.3"
-
- implementation "com.google.guava:guava:29.0-jre"
- }
-
- runtimeOnly 'org.apache.hadoop:hadoop-hdfs:2.7.2'
-
-// runtimeOnly 'org.apache.flink:flink-shaded-hadoop-2-uber:2.4.1-10.0'
-// runtimeOnly 'org.apache.hadoop:hadoop-common:2.4.1'
-// runtimeOnly 'org.apache.hadoop:hadoop-auth:2.4.1'
-
-
- runtimeOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.22.0"
- runtimeOnly "org.apache.logging.log4j:log4j-api:2.22.0"
- runtimeOnly "org.apache.logging.log4j:log4j-core:2.22.0"
-
- compileOnly 'org.projectlombok:lombok:1.18.22'
- annotationProcessor 'org.projectlombok:lombok:1.18.22'
- implementation "com.jcraft:jsch:0.1.55"
-
- implementation group: "org.apache.commons", name: "commons-lang3", version: "3.10"
-
-
- //JDBC
- implementation group: "mysql", name: "mysql-connector-java", version: "8.0.19"
- implementation group: "com.oracle.ojdbc", name: "ojdbc8", version: "19.3.0.0"
- implementation group: "com.microsoft.sqlserver", name: "mssql-jdbc", version: "9.4.0.jre8"
- implementation "net.sourceforge.jtds:jtds:1.3.1"
- implementation group: "com.ibm.informix", name: "jdbc", version: "4.10.14"
- implementation group: "org.postgresql", name: "postgresql", version: "42.7.1"
-
- implementation('io.github.coolbeevip:flyway-core:9.15.2.2') {
- exclude group: 'com.fasterxml.jackson.dataformat', module: 'jackson-dataformat-toml'
- }
- implementation('io.github.coolbeevip:flyway-mysql:9.15.2.2') {
- exclude group: 'com.fasterxml.jackson.dataformat', module: 'jackson-dataformat-toml'
- }
- implementation group: "org.mybatis", name: "mybatis", version: "3.5.4"
- implementation 'info.picocli:picocli:4.6.3'
-
- implementation("org.fusesource.jansi:jansi") {
- // pin an old version here, otherwise newer versions crash the JVM;
- // taken from https://github.com/fusesource/jansi/issues/66#issuecomment-1018386584
- version {
- strictly '1.18'
- }
- }
-
- testImplementation group: 'org.mockito', name: "mockito-scala_$scalaVersion", version: '1.16.29'
- testImplementation group: "org.scalatest", name: "scalatest_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalatest", name: "scalatest-funspec_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalactic", name: "scalactic_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.pegdown", name: "pegdown", version: "1.4.2"
- testImplementation("com.github.tomakehurst:wiremock-jre8:2.27.0") {
- exclude group: "com.fasterxml.jackson.core"
- }
- testImplementation group: "org.junit.jupiter", name: "junit-jupiter-api", version: "5.6.2"
- testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:5.6.2"
- testRuntimeOnly "com.vladsch.flexmark:flexmark-all:0.62.2"
-
- testImplementation "org.testcontainers:testcontainers:1.16.2"
- testImplementation "org.testcontainers:mysql:1.16.2"
- testImplementation "org.testcontainers:postgresql:1.17.2"
- testImplementation "org.testcontainers:mockserver:1.16.2"
- testImplementation "org.mock-server:mockserver-client-java:5.11.2"
-}
-
-configurations.all {
- exclude group: "org.apache.hadoop", module: "hadoop-annotations"
-}
-
-application {
- mainClassName = "com.github.sharpdata.sharpetl.flink.Entrypoint"
-}
-
-configurations.implementation.setCanBeResolved(true)
-configurations.api.setCanBeResolved(true)
-
-def addDeps(String dep) {
- println("Adding $dep into flink shadow jar...")
- shadowJar {
- dependencies {
- include(dependency(dep))
- }
- }
- def depSpec = createDepSpec(dep)
- Set resolvedDependencies = project.configurations.runtimeClasspath.getResolvedConfiguration().getFirstLevelModuleDependencies(depSpec)
- if (resolvedDependencies.isEmpty()) {
- println("ERROR: $dep not found in current project, please add $dep to project dependencies")
- }
- getResolvedArtifacts(resolvedDependencies).each { artifact ->
- println("Adding $artifact into shadow jar...")
- shadowJar {
- dependencies {
- include(dependency(artifact))
- }
- }
- }
-}
-
-def getResolvedArtifacts(Set artifacts) {
- Set resolvedArtifacts = [] as Set
- artifacts.each {
- // add current artifact
- resolvedArtifacts << "${it.moduleGroup}:${it.moduleName}:${it.moduleVersion}"
-
- // recursion to add children
- resolvedArtifacts += getResolvedArtifacts(it.children)
- }
- return resolvedArtifacts
-}
-
-def createDepSpec(String dep) {
- return new Spec() {
- @Override
- boolean isSatisfiedBy(Dependency dependency) {
- return dependency == project.dependencies.create(dep)
- }
- }
-}
-
-task slimJar(type: ShadowJar) {
- from(
- sourceSets.main.output,
- sourceSets.main.resources
- )
- configurations = [project.configurations.compileClasspath]
- dependencies {
- include(project(":core"))
- include(project(":data-modeling"))
-
- include dependency("commons-net:commons-net:3.1")
- }
- relocate "org.apache.commons.net", "com.github.sharpdata.sharpetl.commons.net"
- archiveFileName = "sharp-etl-flink-${flinkVersion}_${scalaVersion}-${version}.jar"
-}
-
-shadowJar {
- zip64 true
- classifier null
- dependencies {
- include(project(":core"))
- include(project(":data-modeling"))
-
- include dependency("org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0")
- include dependency("org.mvel:mvel2:2.4.13.Final")
- include dependency('org.jasypt:jasypt:1.9.3')
- include dependency("com.lihaoyi:fastparse_$scalaVersion:3.0.0")
- include dependency("io.github.classgraph:classgraph:4.8.149")
-
- include dependency("org.apache.httpcomponents:httpclient:4.5.12")
- include dependency("org.mybatis:mybatis:3.5.9")
- include dependency('com.zaxxer:HikariCP:2.6.1')
- include dependency('io.github.coolbeevip:flyway-core:9.15.2.2')
- include dependency('io.github.coolbeevip:flyway-mysql:9.15.2.2')
- include dependency("mysql:mysql-connector-java:8.0.19")
- include dependency("com.oracle.ojdbc:ojdbc8:19.3.0.0")
-
- addDeps("io.circe:circe-yaml_$scalaVersion:0.15.0-RC1")
- addDeps("io.circe:circe-generic_$scalaVersion:0.15.0-M1")
- addDeps("io.circe:circe-generic-extras_$scalaVersion:0.14.3")
-
- addDeps("com.google.guava:guava:29.0-jre")
- addDeps("com.google.guava:failureaccess:1.0.1")
- addDeps("org.apache.paimon:paimon-flink-1.17:0.6.0-incubating")
- addDeps("org.apache.paimon:paimon-oss:0.6.0-incubating")
-
- include dependency("org.postgresql:postgresql:42.2.14")
- include dependency("com.jcraft:jsch:0.1.55")
- include dependency("com.cloudera:ImpalaJDBC41:2.6.3")
- include dependency("com.microsoft.sqlserver:mssql-jdbc:9.4.0.jre8")
- include dependency('net.sourceforge.jtds:jtds:1.3.1')
- include dependency("com.ibm.informix:jdbc:4.10.14")
- include dependency("javax.mail:mail:1.4.7")
-
- addDeps('info.picocli:picocli:4.6.3')
- }
- relocate "com.google", "com.github.sharpdata.sharpetl.google"
- relocate "org.apache.commons.net", "com.github.sharpdata.sharpetl.commons.net"
- relocate "com.zaxxer.hikari", "com.github.sharpdata.sharpetl.hikari"
- archiveFileName = "sharp-etl-flink-standalone-${flinkVersion}_${scalaVersion}-${version}.jar"
- mergeServiceFiles {
- // https://github.com/flyway/flyway/issues/3482#issuecomment-1493367875
- setPath("META-INF/services/org.flywaydb.core.extensibility.Plugin")
- }
-}
-
-task clearJar(type: Delete) {
- delete 'build/libs/lib'
-}
-
-tasks.named('processTestResources') {
- duplicatesStrategy = 'EXCLUDE'
-}
-
-task copyDependencies(type: Copy) {
- from(project.parent.allprojects.configurations.compileClasspath)
- into('build/libs/lib')
- include("scala-collection-compat_$scalaVersion-2.6.0.jar")
- include("mvel2-2.4.13.Final.jar")
- include("fastparse_$scalaVersion-3.0.0.jar")
- include("classgraph-4.8.149.jar")
-
- include("jasypt-1.9.3.jar")
- include("httpclient-4.5.12.jar")
- include("mybatis-3.5.9.jar")
- include("HikariCP-2.6.1.jar")
- include("flyway-core-9.15.2.2.jar")
- include("flyway-core-9.15.2.2.jar")
- include("mysql-connector-java-8.0.19.jar")
- include("ojdbc8-19.3.0.0.jar")
- include("aliyun-sdk-oss-3.16.0.jar")
-
-
- include("circe-yaml_$scalaVersion-0.11.0-M1.jar")
- include("circe-core_$scalaVersion-0.12.0-M3.jar")
- include("circe-generic_$scalaVersion-0.12.0-M3.jar")
- include("circe-generic-extras_$scalaVersion-0.12.0-M3.jar")
- include("cats-core_$scalaVersion-2.0.0-M4.jar")
- include("cats-kernel_$scalaVersion-2.0.0-M4.jar")
- include("postgresql-42.2.14.jar")
- include("jsch-0.1.55.jar")
- include("ImpalaJDBC41-2.6.3.jar")
- include("mssql-jdbc-9.4.0.jre8.jar")
- include("jtds-1.3.1.jar")
- include("jdbc-4.10.14.jar") // informix
-
- include("json-path-2.6.0.jar")
- include("mail-1.4.7.jar")
-
- include("picocli-4.6.3.jar")
- include("lift-json_$scalaVersion-3.4.3")
-}
-
-task buildJars(type: Copy, dependsOn: [":flink:clearJar", ":flink:shadowJar", ":flink:slimJar", ":flink:copyDependencies"])
-
-task sourceJar(type: Jar) {
- classifier = 'sources'
- from sourceSets.main.allSource
-}
-
-scalastyle {
- config = file("${rootDir}/scalastyle_config.xml") // path to scalastyle config xml file
- failOnWarning = true
- sourceSets {
- test {
- skip = true
- }
- }
-}
-
-sourceSets {
- main {
- resources {
- srcDirs += [
- project(':core').sourceSets.main.resources
- ]
- }
- }
-
- test {
- resources {
- srcDirs += [
- project(':data-modeling').sourceSets.test.resources
- ]
- }
- //runtimeClasspath -= files(sourceSets.main.output.resourcesDir)
- }
-}
\ No newline at end of file
diff --git a/flink/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin b/flink/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin
deleted file mode 100644
index 43e0dc6..0000000
--- a/flink/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin
+++ /dev/null
@@ -1 +0,0 @@
-com.github.sharpdata.sharpetl.flink.extra.flyway.hive.HiveDatabaseType
\ No newline at end of file
diff --git a/flink/src/main/resources/application.properties b/flink/src/main/resources/application.properties
deleted file mode 100644
index 28f98cb..0000000
--- a/flink/src/main/resources/application.properties
+++ /dev/null
@@ -1,16 +0,0 @@
-etl.workflow.path=tasks
-etl.default.jobId.column=job_id
-etl.default.jobTime.column=job_time
-flyway.url=jdbc:flink_sharp_etl://localhost/sharp_etl
-flyway.catalog=paimon
-flyway.database=sharp_etl
-flyway.driver=com.github.sharpdata.sharpetl.flink.extra.driver.FlinkJdbcDriver
-flyway.warehouse=oss://sh-flink/warehouse
-flyway.endpoint=oss-cn-shanghai-internal.aliyuncs.com
-flyway.ak=AKAKAKAKAKAKA
-flyway.sk=SKSKSKSKSKSKSKSKSK
-
-flink.default.__table.exec.sort.non-temporal.enabled__=true
-flink.default.execution.runtime-mode=batch
-flink.default.sql-client.execution.result-mode=tableau
-flink.default.table.dml-sync=true
\ No newline at end of file
diff --git a/flink/src/main/resources/mybatis-config.xml b/flink/src/main/resources/mybatis-config.xml
deleted file mode 100644
index c215e80..0000000
--- a/flink/src/main/resources/mybatis-config.xml
+++ /dev/null
@@ -1,33 +0,0 @@
- <!-- MyBatis configuration elements -->
diff --git a/flink/src/main/resources/quality-check.yaml b/flink/src/main/resources/quality-check.yaml
deleted file mode 100644
index 82c730c..0000000
--- a/flink/src/main/resources/quality-check.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-- dataCheckType: power null check
- rule: $column is NULL or $column = 'NULL' or $column = 'null' or $column = ''
- errorType: error
-- dataCheckType: null check
- rule: $column IS NULL
- errorType: error
-- dataCheckType: duplicated check
- rule: UDR.com.github.sharpdata.sharpetl.core.quality.udr.DuplicatedCheck
- errorType: warn
-- dataCheckType: negative check
- rule: $column = '-1'
- errorType: warn
\ No newline at end of file
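
How a `$column` rule template presumably turns into a SQL predicate — the substitution mechanics below are an assumption for illustration, not the engine's actual code:

```scala
// Hypothetical expansion of a quality-check rule; the engine's real
// implementation may differ.
object RuleExpansionSketch {
  def main(args: Array[String]): Unit = {
    def expand(rule: String, column: String): String =
      rule.replace("$column", column)

    println(expand("$column is NULL or $column = 'NULL' or $column = ''", "phone"))
    // phone is NULL or phone = 'NULL' or phone = ''
  }
}
```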
diff --git a/flink/src/main/resources/tasks/hello_world.sql b/flink/src/main/resources/tasks/hello_world.sql
deleted file mode 100644
index 82eb9fa..0000000
--- a/flink/src/main/resources/tasks/hello_world.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- workflow=hello_world
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=define variable
--- source=temp
--- target=variables
-
-SELECT 'RESULT' AS `OUTPUT_COL`;
-
--- step=print SUCCESS to console
--- source=temp
--- target=console
-
-SELECT 'SUCCESS' AS `${OUTPUT_COL}`;
\ No newline at end of file
diff --git a/flink/src/main/resources/tasks/mysql_to_paimon.sql b/flink/src/main/resources/tasks/mysql_to_paimon.sql
deleted file mode 100644
index cb03656..0000000
--- a/flink/src/main/resources/tasks/mysql_to_paimon.sql
+++ /dev/null
@@ -1,44 +0,0 @@
--- workflow=mysql_to_paimon
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=define source catalog
--- source=ddl
--- target=do_nothing
-CREATE CATALOG sharp_etl_db
-WITH ( 'type' = 'jdbc',
- 'default-database' = 'sharp_etl',
- 'username' = 'root',
- 'password' = 'root',
- 'base-url' = 'jdbc:mysql://localhost:3306'
-);
-
--- step=define paimon sink table
--- source=ddl
--- target=do_nothing
-
-CREATE TABLE IF NOT EXISTS `paimon`.`default`.flyway_schema_history (
- `id` INT,
- `version` VARCHAR,
- `description` VARCHAR,
- `type` VARCHAR,
- `script` VARCHAR,
- `checksum` BIGINT,
- `installed_by` VARCHAR,
- `installed_on` TIMESTAMP,
- `execution_time` INT,
- `success` BOOLEAN,
- user_action_time AS PROCTIME()
-) WITH (
- 'connector' = 'paimon',
- 'tag.automatic-creation' = 'process-time',
- 'tag.creation-period' = 'daily'
-);
-
--- step=insert into paimon sink table
--- source=do_nothing
--- target=built_in
--- tableName=`paimon`.`default`.flyway_schema_history
-
-SELECT * FROM `sharp_etl_db`.`sharp_etl`.`flyway_schema_history`;
\ No newline at end of file
diff --git a/flink/src/main/resources/tasks/quality_check.sql b/flink/src/main/resources/tasks/quality_check.sql
deleted file mode 100644
index 936ba30..0000000
--- a/flink/src/main/resources/tasks/quality_check.sql
+++ /dev/null
@@ -1,47 +0,0 @@
--- workflow=test_dwd_with_quality_check
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
-
--- step=var setup
--- source=temp
--- target=variables
-select DATE_FORMAT(TO_TIMESTAMP('${DATA_RANGE_END}' , 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
- extract(hour from TO_TIMESTAMP('${DATA_RANGE_START}' , 'yyyy-MM-dd HH:mm:ss')) as `HOUR_END`,
- TO_TIMESTAMP('${DATA_RANGE_START}' , 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
-
--- step=read temp data
--- source=temp
--- options
--- idColumn=order_id
--- sortColumn=order_id
--- column.phone.qualityCheckRules=power null check
--- column.value.qualityCheckRules=negative check
--- target=console
-select 1212121242 as `order_id`,
- '11' as `phone`,
- '-1' as `value`,
- '${JOB_ID}' as `job_id`,
- '${EFFECTIVE_START_TIME}' as effective_start_time,
- '9999-01-01 00:00:00' as effective_end_time,
- '1' as is_active,
- '1' as is_latest,
- '${DATA_RANGE_START}' as idempotent_key,
- '${DATE_END}' as dw_insert_date
-
-UNION ALL select 1212121243 as `order_id`,
- 'null' as `phone`,
- '1' as `value`,
- '${JOB_ID}' as `job_id`,
- '${EFFECTIVE_START_TIME}' as effective_start_time,
- '9999-01-01 00:00:00' as effective_end_time,
- '1' as is_active,
- '1' as is_latest,
- '${DATA_RANGE_START}' as idempotent_key,
- '${DATE_END}' as dw_insert_date;
-
--- step=read check result
--- source=built_in
--- target=console
-select * from `paimon`.`sharp_etl`.`quality_check_log` where job_id='${JOB_ID}';
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/Entrypoint.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/Entrypoint.scala
deleted file mode 100644
index 92d5071..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/Entrypoint.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package com.github.sharpdata.sharpetl.flink
-
-import com.github.sharpdata.sharpetl.flink.cli.Command
-import picocli.CommandLine
-
-
-object Entrypoint {
- val errorHandler: CommandLine.IExecutionExceptionHandler =
- new CommandLine.IExecutionExceptionHandler() {
- def handleExecutionException(ex: Exception, commandLine: CommandLine, parseResult: CommandLine.ParseResult): Int = {
- println("Failed to execute job, exiting with error: " + ex.getMessage)
- ex.printStackTrace()
- commandLine.getCommandSpec.exitCodeOnExecutionException
- }
- }
-
- def main(args: Array[String]): Unit = {
- val code = new CommandLine(new Command()).setExecutionExceptionHandler(errorHandler).execute(
- args: _*
- )
- if (!succeed(code)) {
- println("Failed to execute job, exiting with code " + code)
- System.exit(code)
- }
- }
-
- private def succeed(code: Int) = {
- code == 0
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/cli/Command.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/cli/Command.scala
deleted file mode 100644
index eedd69c..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/cli/Command.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.cli
-
-import com.github.sharpdata.sharpetl.core.api.WfEvalResult.throwFirstException
-import com.github.sharpdata.sharpetl.core.api.{LogDrivenInterpreter, WfEvalResult}
-import com.github.sharpdata.sharpetl.core.cli.{BatchJobCommand, EncryptionCommand, SingleJobCommand}
-import com.github.sharpdata.sharpetl.core.notification.NotificationUtil
-import com.github.sharpdata.sharpetl.core.quality.QualityCheckRuleConfig.readQualityCheckRules
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor.jobLogAccessor
-import com.github.sharpdata.sharpetl.core.util.FlywayUtil.migrate
-import com.github.sharpdata.sharpetl.core.util._
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession.getFlinkInterpreter
-import com.github.sharpdata.sharpetl.modeling.cli.{GenerateDwdStepCommand, GenerateSqlFiles}
-import picocli.CommandLine
-
-
-@CommandLine.Command(name = "single-job")
-class SingleFlinkJobCommand extends SingleJobCommand {
- override def run(): Unit = {
- loggingJobParameters()
- ETLConfig.extraParam = extraParams
- ETLConfig.setPropertyPath(propertyPath, env)
- val etlDatabaseType = JDBCUtil.dbType
- val interpreter = getFlinkInterpreter(local, wfName, releaseResource, etlDatabaseType, readQualityCheckRules())
- //JavaVersionChecker.checkJavaVersion()
- try {
- migrate()
- val wfInterpretingResult: WfEvalResult = LogDrivenInterpreter(
- WorkflowReader.readWorkflow(wfName),
- interpreter,
- jobLogAccessor = jobLogAccessor,
- command = this
- ).eval()
- new NotificationUtil(jobLogAccessor).notify(Seq(wfInterpretingResult))
- throwFirstException(Seq(wfInterpretingResult))
- } catch {
- case e: Exception =>
- ETLLogger.error("Failed to execute job", e)
- throw e
- } finally {
- interpreter.close()
- }
- }
-}
-
-@CommandLine.Command(name = "batch-job")
-class BatchFlinkJobCommand extends BatchJobCommand {
- override def run(): Unit = {
- loggingJobParameters()
- ETLConfig.extraParam = extraParams
- ETLConfig.setPropertyPath(propertyPath, env)
- //JavaVersionChecker.checkJavaVersion()
- migrate()
- val etlDatabaseType = JDBCUtil.dbType
- // val logDrivenInterpreters = if (excelOptions != null) getJobsFromExcel(etlDatabaseType) else getInterpretersFromSqlFile(etlDatabaseType)
- val logDrivenInterpreters = getInterpretersFromSqlFile(etlDatabaseType)
- val batchJobResult: Seq[WfEvalResult] =
- try {
- logDrivenInterpreters.map(_.eval())
- } finally {
- logDrivenInterpreters.headOption.foreach(_.workflowInterpreter.close())
- }
- val failedCount = batchJobResult.map(_.jobLogs.count { it => it.isFailure() }).sum
- val skippedCount = batchJobResult.map(_.jobLogs.count { it => it.isSkipped() }).sum
- val successCount = batchJobResult.map(_.jobLogs.count { it => it.isSuccess() }).sum
-
- ETLLogger.info(
- s"""
- |Total jobs: ${logDrivenInterpreters.size}, success: $successCount, failed: $failedCount, skipped: $skippedCount
- |Details:
- |${batchJobResult.map(_.toString).mkString("\n\n")}
- |""".stripMargin)
- new NotificationUtil(jobLogAccessor).notify(batchJobResult)
- if (failedCount > 0) {
- throwFirstException(batchJobResult)
- }
- }
-
- def getInterpretersFromSqlFile(etlDatabaseType: String): Seq[LogDrivenInterpreter] = {
- sqlFileOptions.wfNames
- .map(wfName => {
- val interpreter = getFlinkInterpreter(local, wfName, releaseResource, etlDatabaseType, readQualityCheckRules())
- //JavaVersionChecker.checkJavaVersion()
- LogDrivenInterpreter(
- WorkflowReader.readWorkflow(wfName),
- interpreter,
- jobLogAccessor = jobLogAccessor,
- command = this
- )
- })
- }
-}
-
-@CommandLine.Command(
- subcommands = Array(
- classOf[SingleFlinkJobCommand],
- classOf[BatchFlinkJobCommand],
- classOf[GenerateSqlFiles],
- classOf[EncryptionCommand],
- classOf[GenerateDwdStepCommand]
- )
-)
-class Command extends Runnable {
-
- override def run(): Unit = ()
-
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/BuiltInDataSource.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/BuiltInDataSource.scala
deleted file mode 100644
index 08886e1..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/BuiltInDataSource.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession
-import org.apache.flink.table.api.TableEnvironment
-
-@source(types = Array("built_in"))
-@sink(types = Array("built_in"))
-class BuiltInDataSource extends Sink[DataFrame] with Source[DataFrame, TableEnvironment] {
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- val sql = s"INSERT INTO ${step.target.asInstanceOf[DBDataSourceConfig].getTableName} ${step.getSql}"
- println("executing sql:\n " + sql)
- ETLFlinkSession.batchEnv.executeSql(sql)
- }
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: TableEnvironment, variables: Variables): DataFrame = {
- println("executing sql:\n " + step.getSql)
- executionContext.sqlQuery(step.getSql)
- }
-}
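
The statement shape the sink issues, shown with the table name from the `mysql_to_paimon.sql` task and its SELECT body:

```scala
// Sketch of the INSERT the sink composes: the step's SELECT is appended to an
// INSERT INTO against the step target's tableName.
object InsertSqlSketch {
  def main(args: Array[String]): Unit = {
    val tableName = "`paimon`.`default`.flyway_schema_history"
    val stepSql   = "SELECT * FROM `sharp_etl_db`.`sharp_etl`.`flyway_schema_history`"
    println(s"INSERT INTO $tableName $stepSql")
  }
}
```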
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/ConsoleDataSource.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/ConsoleDataSource.scala
deleted file mode 100644
index 0fbb6a4..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/ConsoleDataSource.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Sink
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-
-@sink(types = Array("console"))
-class ConsoleDataSource extends Sink[DataFrame] {
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- println("console output schema:")
- df.printSchema()
- println("explain plan:")
- println(df.explain())
- println("console output:")
-
- df.execute().print()
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/DDLDataSource.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/DDLDataSource.scala
deleted file mode 100644
index c84f865..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/DDLDataSource.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession
-import org.apache.flink.table.api.TableEnvironment
-
-@sink(types = Array("ddl"))
-@source(types = Array("ddl"))
-class DDLDataSource extends Sink[DataFrame] with Source[DataFrame, TableEnvironment] {
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- println("executing DDL: \n" + step.sql)
- ETLFlinkSession.batchEnv.executeSql(step.sql)
- }
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: TableEnvironment, variables: Variables): DataFrame = {
- println("executing DDL: \n" + step.sql)
- ETLFlinkSession.batchEnv.executeSql(step.sql)
- executionContext.sqlQuery("SELECT 'SUCCESS' AS `result`")
- }
-}
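
Used as a source, the ddl type runs the statement purely for its side effect and then fabricates a one-row SUCCESS marker so the step still yields a DataFrame. A hedged sketch of the kind of step SQL it executes (table name and connector are made up):

  val ddl =
    """CREATE TABLE IF NOT EXISTS ods_orders (
      |  id BIGINT,
      |  amount DECIMAL(10, 2)
      |) WITH ('connector' = 'datagen')""".stripMargin
  ETLFlinkSession.batchEnv.executeSql(ddl)                                       // side effect only
  val marker = ETLFlinkSession.batchEnv.sqlQuery("SELECT 'SUCCESS' AS `result`") // what read() returns
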
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/DoNothingDataSource.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/DoNothingDataSource.scala
deleted file mode 100644
index ba254ea..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/DoNothingDataSource.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession
-import org.apache.flink.table.api.TableEnvironment
-
-@source(types = Array("do_nothing"))
-@sink(types = Array("do_nothing"))
-class DoNothingDataSource extends Sink[DataFrame] with Source[DataFrame, TableEnvironment] {
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = ()
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: TableEnvironment, variables: Variables): DataFrame = {
- executionContext.sqlQuery("SELECT 'SUCCESS' AS `result`")
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/TempDataSource.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/TempDataSource.scala
deleted file mode 100644
index 6032e8e..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/TempDataSource.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession
-import org.apache.flink.table.api.TableEnvironment
-
-@source(types = Array("temp"))
-@sink(types = Array("temp"))
-class TempDataSource extends Sink[DataFrame] with Source[DataFrame, TableEnvironment] {
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- ETLFlinkSession.batchEnv.createTemporaryView(step.target.asInstanceOf[DBDataSourceConfig].getTableName, df)
- }
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: TableEnvironment, variables: Variables): DataFrame = {
- println("executing sql:\n " + step.getSql)
- executionContext.sqlQuery(step.getSql)
- }
-}
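
The temp source/sink pair is what chains multi-step workflows: a step's output is parked as a temporary view and a later step selects from it by name. A self-contained sketch with the plain Table API (the view name is hypothetical):

  import org.apache.flink.table.api.{EnvironmentSettings, TableEnvironment}

  val env = TableEnvironment.create(EnvironmentSettings.inBatchMode())
  val stepOutput = env.sqlQuery("SELECT 1 AS id, 'a' AS tag")
  env.createTemporaryView("step_1_result", stepOutput)        // what the temp sink does
  val nextStep = env.sqlQuery("SELECT id FROM step_1_result") // what a later temp source reads
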
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/VariablesDataSource.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/VariablesDataSource.scala
deleted file mode 100644
index 3e9a7c2..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/datasource/VariablesDataSource.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Sink
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import com.github.sharpdata.sharpetl.flink.util.VariablesUtil
-
-@source(types = Array("variables"))
-@sink(types = Array("variables"))
-class VariablesDataSource extends Sink[DataFrame] {
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- VariablesUtil.setVariables(df, variables)
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkDatabaseMetaData.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkDatabaseMetaData.scala
deleted file mode 100644
index 7032224..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkDatabaseMetaData.scala
+++ /dev/null
@@ -1,361 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.driver
-
-
-import java.sql.{Connection, DatabaseMetaData, ResultSet, RowIdLifetime}
-
-// scalastyle:off
-class FlinkDatabaseMetaData extends DatabaseMetaData {
- override def allProceduresAreCallable(): Boolean = true
-
- override def allTablesAreSelectable(): Boolean = true
-
- override def getURL: String = ""
-
- override def getUserName: String = ""
-
- override def isReadOnly: Boolean = false
-
- override def nullsAreSortedHigh(): Boolean = true
-
- override def nullsAreSortedLow(): Boolean = false
-
- override def nullsAreSortedAtStart(): Boolean = true
-
- override def nullsAreSortedAtEnd(): Boolean = false
-
- override def getDatabaseProductName: String = "flink_sharp_etl"
-
- override def getDatabaseProductVersion: String = "1.0"
-
- override def getDriverName: String = "com.github.sharpdata.sharpetl.flink.extra.driver.FlinkJdbcDriver"
-
- override def getDriverVersion: String = "0"
-
- override def getDriverMajorVersion: Int = 0
-
- override def getDriverMinorVersion: Int = 0
-
- override def usesLocalFiles(): Boolean = true
-
- override def usesLocalFilePerTable(): Boolean = true
-
- override def supportsMixedCaseIdentifiers(): Boolean = true
-
- override def storesUpperCaseIdentifiers(): Boolean = true
-
- override def storesLowerCaseIdentifiers(): Boolean = true
-
- override def storesMixedCaseIdentifiers(): Boolean = true
-
- override def supportsMixedCaseQuotedIdentifiers(): Boolean = true
-
- override def storesUpperCaseQuotedIdentifiers(): Boolean = true
-
- override def storesLowerCaseQuotedIdentifiers(): Boolean = true
-
- override def storesMixedCaseQuotedIdentifiers(): Boolean = true
-
- override def getIdentifierQuoteString: String = "`"
-
- override def getSQLKeywords: String = ???
-
- override def getNumericFunctions: String = ???
-
- override def getStringFunctions: String = ???
-
- override def getSystemFunctions: String = ???
-
- override def getTimeDateFunctions: String = ???
-
- override def getSearchStringEscape: String = ???
-
- override def getExtraNameCharacters: String = ???
-
- override def supportsAlterTableWithAddColumn(): Boolean = true
-
- override def supportsAlterTableWithDropColumn(): Boolean = true
-
- override def supportsColumnAliasing(): Boolean = true
-
- override def nullPlusNonNullIsNull(): Boolean = true
-
- override def supportsConvert(): Boolean = true
-
- override def supportsConvert(fromType: Int, toType: Int): Boolean = true
-
- override def supportsTableCorrelationNames(): Boolean = true
-
- override def supportsDifferentTableCorrelationNames(): Boolean = true
-
- override def supportsExpressionsInOrderBy(): Boolean = true
-
- override def supportsOrderByUnrelated(): Boolean = true
-
- override def supportsGroupBy(): Boolean = true
-
- override def supportsGroupByUnrelated(): Boolean = true
-
- override def supportsGroupByBeyondSelect(): Boolean = true
-
- override def supportsLikeEscapeClause(): Boolean = true
-
- override def supportsMultipleResultSets(): Boolean = true
-
- override def supportsMultipleTransactions(): Boolean = true
-
- override def supportsNonNullableColumns(): Boolean = true
-
- override def supportsMinimumSQLGrammar(): Boolean = true
-
- override def supportsCoreSQLGrammar(): Boolean = true
-
- override def supportsExtendedSQLGrammar(): Boolean = true
-
- override def supportsANSI92EntryLevelSQL(): Boolean = true
-
- override def supportsANSI92IntermediateSQL(): Boolean = true
-
- override def supportsANSI92FullSQL(): Boolean = true
-
- override def supportsIntegrityEnhancementFacility(): Boolean = true
-
- override def supportsOuterJoins(): Boolean = true
-
- override def supportsFullOuterJoins(): Boolean = true
-
- override def supportsLimitedOuterJoins(): Boolean = true
-
- override def getSchemaTerm: String = ???
-
- override def getProcedureTerm: String = ???
-
- override def getCatalogTerm: String = ???
-
- override def isCatalogAtStart: Boolean = true
-
- override def getCatalogSeparator: String = ???
-
- override def supportsSchemasInDataManipulation(): Boolean = true
-
- override def supportsSchemasInProcedureCalls(): Boolean = true
-
- override def supportsSchemasInTableDefinitions(): Boolean = true
-
- override def supportsSchemasInIndexDefinitions(): Boolean = true
-
- override def supportsSchemasInPrivilegeDefinitions(): Boolean = true
-
- override def supportsCatalogsInDataManipulation(): Boolean = true
-
- override def supportsCatalogsInProcedureCalls(): Boolean = true
-
- override def supportsCatalogsInTableDefinitions(): Boolean = true
-
- override def supportsCatalogsInIndexDefinitions(): Boolean = true
-
- override def supportsCatalogsInPrivilegeDefinitions(): Boolean = true
-
- override def supportsPositionedDelete(): Boolean = true
-
- override def supportsPositionedUpdate(): Boolean = true
-
- override def supportsSelectForUpdate(): Boolean = true
-
- override def supportsStoredProcedures(): Boolean = true
-
- override def supportsSubqueriesInComparisons(): Boolean = true
-
- override def supportsSubqueriesInExists(): Boolean = true
-
- override def supportsSubqueriesInIns(): Boolean = true
-
- override def supportsSubqueriesInQuantifieds(): Boolean = true
-
- override def supportsCorrelatedSubqueries(): Boolean = true
-
- override def supportsUnion(): Boolean = true
-
- override def supportsUnionAll(): Boolean = true
-
- override def supportsOpenCursorsAcrossCommit(): Boolean = true
-
- override def supportsOpenCursorsAcrossRollback(): Boolean = true
-
- override def supportsOpenStatementsAcrossCommit(): Boolean = true
-
- override def supportsOpenStatementsAcrossRollback(): Boolean = true
-
- override def getMaxBinaryLiteralLength: Int = 0
-
- override def getMaxCharLiteralLength: Int = 0
-
- override def getMaxColumnNameLength: Int = 0
-
- override def getMaxColumnsInGroupBy: Int = 0
-
- override def getMaxColumnsInIndex: Int = 0
-
- override def getMaxColumnsInOrderBy: Int = 0
-
- override def getMaxColumnsInSelect: Int = 0
-
- override def getMaxColumnsInTable: Int = 0
-
- override def getMaxConnections: Int = 0
-
- override def getMaxCursorNameLength: Int = 0
-
- override def getMaxIndexLength: Int = 0
-
- override def getMaxSchemaNameLength: Int = 0
-
- override def getMaxProcedureNameLength: Int = 0
-
- override def getMaxCatalogNameLength: Int = 0
-
- override def getMaxRowSize: Int = 0
-
- override def doesMaxRowSizeIncludeBlobs(): Boolean = true
-
- override def getMaxStatementLength: Int = 0
-
- override def getMaxStatements: Int = 0
-
- override def getMaxTableNameLength: Int = 0
-
- override def getMaxTablesInSelect: Int = 0
-
- override def getMaxUserNameLength: Int = 0
-
- override def getDefaultTransactionIsolation: Int = 0
-
- override def supportsTransactions(): Boolean = true
-
- override def supportsTransactionIsolationLevel(level: Int): Boolean = true
-
- override def supportsDataDefinitionAndDataManipulationTransactions(): Boolean = true
-
- override def supportsDataManipulationTransactionsOnly(): Boolean = true
-
- override def dataDefinitionCausesTransactionCommit(): Boolean = true
-
- override def dataDefinitionIgnoredInTransactions(): Boolean = true
-
- override def getProcedures(catalog: String, schemaPattern: String, procedureNamePattern: String): ResultSet = ???
-
- override def getProcedureColumns(catalog: String, schemaPattern: String, procedureNamePattern: String, columnNamePattern: String): ResultSet = ???
-
- override def getTables(catalog: String, schemaPattern: String, tableNamePattern: String, types: Array[String]): ResultSet = ???
-
- override def getSchemas: ResultSet = ???
-
- override def getCatalogs: ResultSet = ???
-
- override def getTableTypes: ResultSet = ???
-
- override def getColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = ???
-
- override def getColumnPrivileges(catalog: String, schema: String, table: String, columnNamePattern: String): ResultSet = ???
-
- override def getTablePrivileges(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = ???
-
- override def getBestRowIdentifier(catalog: String, schema: String, table: String, scope: Int, nullable: Boolean): ResultSet = ???
-
- override def getVersionColumns(catalog: String, schema: String, table: String): ResultSet = ???
-
- override def getPrimaryKeys(catalog: String, schema: String, table: String): ResultSet = ???
-
- override def getImportedKeys(catalog: String, schema: String, table: String): ResultSet = ???
-
- override def getExportedKeys(catalog: String, schema: String, table: String): ResultSet = ???
-
- override def getCrossReference(parentCatalog: String, parentSchema: String, parentTable: String, foreignCatalog: String, foreignSchema: String, foreignTable: String): ResultSet = ???
-
- override def getTypeInfo: ResultSet = ???
-
- override def getIndexInfo(catalog: String, schema: String, table: String, unique: Boolean, approximate: Boolean): ResultSet = ???
-
- override def supportsResultSetType(`type`: Int): Boolean = true
-
- override def supportsResultSetConcurrency(`type`: Int, concurrency: Int): Boolean = true
-
- override def ownUpdatesAreVisible(`type`: Int): Boolean = true
-
- override def ownDeletesAreVisible(`type`: Int): Boolean = true
-
- override def ownInsertsAreVisible(`type`: Int): Boolean = true
-
- override def othersUpdatesAreVisible(`type`: Int): Boolean = true
-
- override def othersDeletesAreVisible(`type`: Int): Boolean = true
-
- override def othersInsertsAreVisible(`type`: Int): Boolean = true
-
- override def updatesAreDetected(`type`: Int): Boolean = true
-
- override def deletesAreDetected(`type`: Int): Boolean = true
-
- override def insertsAreDetected(`type`: Int): Boolean = true
-
- override def supportsBatchUpdates(): Boolean = true
-
- override def getUDTs(catalog: String, schemaPattern: String, typeNamePattern: String, types: Array[Int]): ResultSet = ???
-
- override def getConnection: Connection = ???
-
- override def supportsSavepoints(): Boolean = true
-
- override def supportsNamedParameters(): Boolean = true
-
- override def supportsMultipleOpenResults(): Boolean = true
-
- override def supportsGetGeneratedKeys(): Boolean = true
-
- override def getSuperTypes(catalog: String, schemaPattern: String, typeNamePattern: String): ResultSet = ???
-
- override def getSuperTables(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = ???
-
- override def getAttributes(catalog: String, schemaPattern: String, typeNamePattern: String, attributeNamePattern: String): ResultSet = ???
-
- override def supportsResultSetHoldability(holdability: Int): Boolean = true
-
- override def getResultSetHoldability: Int = 0
-
- override def getDatabaseMajorVersion: Int = 0
-
- override def getDatabaseMinorVersion: Int = 0
-
- override def getJDBCMajorVersion: Int = 0
-
- override def getJDBCMinorVersion: Int = 0
-
- override def getSQLStateType: Int = 0
-
- override def locatorsUpdateCopy(): Boolean = true
-
- override def supportsStatementPooling(): Boolean = true
-
- override def getRowIdLifetime: RowIdLifetime = ???
-
- override def getSchemas(catalog: String, schemaPattern: String): ResultSet = ???
-
- override def supportsStoredFunctionsUsingCallSyntax(): Boolean = true
-
- override def autoCommitFailureClosesAllResultSets(): Boolean = true
-
- override def getClientInfoProperties: ResultSet = ???
-
- override def getFunctions(catalog: String, schemaPattern: String, functionNamePattern: String): ResultSet = ???
-
- override def getFunctionColumns(catalog: String, schemaPattern: String, functionNamePattern: String, columnNamePattern: String): ResultSet = ???
-
- override def getPseudoColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = ???
-
- override def generatedKeyAlwaysReturned(): Boolean = true
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = true
-}
-
-// scalastyle:on
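
A note on the many ??? bodies above: in Scala, ??? throws scala.NotImplementedError, so this metadata shim only serves clients that stay on the implemented subset, e.g.:

  val meta = new FlinkDatabaseMetaData()
  meta.getDatabaseProductName   // "flink_sharp_etl"
  meta.getIdentifierQuoteString // "`"
  // meta.getSQLKeywords        // would throw scala.NotImplementedError at runtime
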
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcConnection.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcConnection.scala
deleted file mode 100644
index b333431..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcConnection.scala
+++ /dev/null
@@ -1,120 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.driver
-
-import java.sql.{Blob, CallableStatement, Clob, Connection, DatabaseMetaData, NClob, PreparedStatement, SQLWarning, SQLXML, Savepoint, Statement, Struct}
-import java.util.Properties
-import java.util.concurrent.Executor
-import java.{sql, util}
-import scala.jdk.CollectionConverters._
-
-// scalastyle:off
-class FlinkJdbcConnection extends Connection {
-
- override def createStatement(): Statement = new FlinkJdbcStatement()
-
- override def prepareStatement(sql: String): PreparedStatement = new FlinkJdbcPreparedStatement(sql)
-
- override def prepareCall(sql: String): CallableStatement = null
-
- override def nativeSQL(sql: String): String = sql
-
- override def setAutoCommit(autoCommit: Boolean): Unit = ()
-
- override def getAutoCommit: Boolean = false
-
- override def commit(): Unit = ()
-
- override def rollback(): Unit = ()
-
- override def close(): Unit = ()
-
- override def isClosed: Boolean = false
-
- override def getMetaData: DatabaseMetaData = new FlinkDatabaseMetaData()
-
- override def setReadOnly(readOnly: Boolean): Unit = ()
-
- override def isReadOnly: Boolean = false
-
- override def setCatalog(catalog: String): Unit = ()
-
- override def getCatalog: String = ""
-
- override def setTransactionIsolation(level: Int): Unit = ()
-
- override def getTransactionIsolation: Int = 0
-
- override def getWarnings: SQLWarning = null
-
- override def clearWarnings(): Unit = ()
-
- override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = null
-
- override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int): PreparedStatement = null
-
- override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int): CallableStatement = null
-
- override def getTypeMap: util.Map[String, Class[_]] = Map.empty[String, Class[_]].asJava
-
- override def setTypeMap(map: util.Map[String, Class[_]]): Unit = ()
-
- override def setHoldability(holdability: Int): Unit = ()
-
- override def getHoldability: Int = 0
-
- override def setSavepoint(): Savepoint = null
-
- override def setSavepoint(name: String): Savepoint = null
-
- override def rollback(savepoint: Savepoint): Unit = ()
-
- override def releaseSavepoint(savepoint: Savepoint): Unit = ()
-
- override def createStatement(resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): Statement = null
-
- override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): PreparedStatement = null
-
- override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): CallableStatement = null
-
- override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = null
-
- override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = null
-
- override def prepareStatement(sql: String, columnNames: Array[String]): PreparedStatement = null
-
- override def createClob(): Clob = null
-
- override def createBlob(): Blob = null
-
- override def createNClob(): NClob = null
-
- override def createSQLXML(): SQLXML = null
-
- override def isValid(timeout: Int): Boolean = false
-
- override def setClientInfo(name: String, value: String): Unit = ()
-
- override def setClientInfo(properties: Properties): Unit = ()
-
- override def getClientInfo(name: String): String = ""
-
- override def getClientInfo: Properties = null
-
- override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = null
-
- override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = null
-
- override def setSchema(schema: String): Unit = ()
-
- override def getSchema: String = ""
-
- override def abort(executor: Executor): Unit = ()
-
- override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = ()
-
- override def getNetworkTimeout: Int = 0
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = false
-}
-// scalastyle:on
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcDriver.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcDriver.scala
deleted file mode 100644
index 03f679a..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcDriver.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.driver
-
-import java.sql.{Connection, Driver, DriverManager, DriverPropertyInfo, SQLException}
-import java.util.Properties
-import java.util.logging.Logger
-
-// scalastyle:off
-class FlinkJdbcDriver extends Driver {
-
- private var registered = false
-
-  private def load(): FlinkJdbcDriver = {
-    try {
-      if (!registered) {
-        registered = true
-        DriverManager.registerDriver(FlinkJdbcDriver.INSTANCE)
-      }
-    } catch {
-      case e: SQLException => e.printStackTrace()
-    }
-    FlinkJdbcDriver.INSTANCE
-  }
-
- override def connect(url: String, info: Properties): Connection = new FlinkJdbcConnection()
-
-  override def acceptsURL(url: String): Boolean = url.contains("flink")
-
- override def getPropertyInfo(url: String, info: Properties): Array[DriverPropertyInfo] = Array.empty
-
- override def getMajorVersion: Int = 0
-
- override def getMinorVersion: Int = 0
-
- override def jdbcCompliant(): Boolean = false
-
- override def getParentLogger: Logger = null
-}
-
-object FlinkJdbcDriver {
- val INSTANCE = new FlinkJdbcDriver()
- INSTANCE.load()
-}
-
-// scalastyle:on
\ No newline at end of file
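
Registration happens when the companion object is first touched (its initializer calls load()), not via the usual java.sql.Driver service file, so a client has to reference it explicitly. Sketch; the URL format is an assumption, since acceptsURL only checks that the string contains "flink":

  FlinkJdbcDriver.INSTANCE // forces object initialization, which registers the driver
  val conn = java.sql.DriverManager.getConnection("jdbc:flink://embedded")
  val stmt = conn.createStatement()
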
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcPreparedStatement.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcPreparedStatement.scala
deleted file mode 100644
index c7ea499..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcPreparedStatement.scala
+++ /dev/null
@@ -1,268 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.driver
-
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession
-
-import java.io.{InputStream, Reader}
-import java.net.URL
-import java.sql._
-import java.time.LocalDateTime
-import java.util.Calendar
-import scala.collection.mutable
-
-// scalastyle:off
-class FlinkJdbcPreparedStatement(val sql: String) extends PreparedStatement {
- private var resultSet: FlinkJdbcResultSet = null
-
- private val parameterMetaData = mutable.Map[Int, Any]()
-
- def escape(value: String): String = value.replace("'", "")
-
- def buildSql: String = {
- parameterMetaData.toList.sortBy(_._1)
- .foldLeft(sql) {
- case (accSql, (_, value)) =>
- value match {
-          case _ if !accSql.contains("?") => accSql
-          case null => accSql.replaceFirst("\\?", "NULL")
-          case _: Int | _: Boolean => accSql.replaceFirst("\\?", value.toString)
-          case _: String => accSql.replaceFirst("\\?", s"'${escape(value.toString)}'")
-          case time: LocalDateTime => accSql.replaceFirst("\\?", s"'${time.format(L_YYYY_MM_DD_HH_MM_SS)}'")
-          case _ => accSql.replaceFirst("\\?", s"'${escape(value.toString)}'")
- }
- }
- }
-
- override def executeQuery(): ResultSet = {
- println(s"[DRIVER] executing sql $buildSql")
- new FlinkJdbcResultSet(ETLFlinkSession.batchEnv.sqlQuery(buildSql), this)
- }
-
- override def executeUpdate(): Int = {
- println(s"[DRIVER] executing sql $buildSql")
- ETLFlinkSession.batchEnv.executeSql(buildSql)
- 0
- }
-
-  override def setNull(parameterIndex: Int, sqlType: Int): Unit = parameterMetaData.put(parameterIndex, null) // rendered as NULL by buildSql
-
- override def setBoolean(parameterIndex: Int, x: Boolean): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setByte(parameterIndex: Int, x: Byte): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setShort(parameterIndex: Int, x: Short): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setInt(parameterIndex: Int, x: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setLong(parameterIndex: Int, x: Long): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setFloat(parameterIndex: Int, x: Float): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setDouble(parameterIndex: Int, x: Double): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setBigDecimal(parameterIndex: Int, x: java.math.BigDecimal): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setString(parameterIndex: Int, x: String): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setBytes(parameterIndex: Int, x: scala.Array[Byte]): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setDate(parameterIndex: Int, x: Date): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setTime(parameterIndex: Int, x: Time): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setUnicodeStream(parameterIndex: Int, x: InputStream, length: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def clearParameters(): Unit = parameterMetaData.clear()
-
- override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setObject(parameterIndex: Int, x: Any): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def execute(): Boolean = {
- executeUpdate()
- true
- }
-
- override def addBatch(): Unit = ()
-
- override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Int): Unit = ()
-
- override def setRef(parameterIndex: Int, x: Ref): Unit = ()
-
- override def setBlob(parameterIndex: Int, x: Blob): Unit = ()
-
- override def setClob(parameterIndex: Int, x: Clob): Unit = ()
-
- override def setArray(parameterIndex: Int, x: java.sql.Array): Unit = ()
-
- override def getMetaData: ResultSetMetaData = ???
-
- override def setDate(parameterIndex: Int, x: Date, cal: Calendar): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setTime(parameterIndex: Int, x: Time, cal: Calendar): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setTimestamp(parameterIndex: Int, x: Timestamp, cal: Calendar): Unit = {
- parameterMetaData.put(parameterIndex, x)
- }
-
- override def setNull(parameterIndex: Int, sqlType: Int, typeName: String): Unit = parameterMetaData.put(parameterIndex, null)
-
- override def setURL(parameterIndex: Int, x: URL): Unit = parameterMetaData.put(parameterIndex, x)
-
-  override def getParameterMetaData: ParameterMetaData = ??? // TODO: implement with null checks so that calling getParameterMetaData() before the statement executes does not throw a NullPointerException
-
- override def setRowId(parameterIndex: Int, x: RowId): Unit = ()
-
- override def setNString(parameterIndex: Int, value: String): Unit = ()
-
- override def setNCharacterStream(parameterIndex: Int, value: Reader, length: Long): Unit = ()
-
- override def setNClob(parameterIndex: Int, value: NClob): Unit = ()
-
- override def setClob(parameterIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def setBlob(parameterIndex: Int, inputStream: InputStream, length: Long): Unit = ()
-
- override def setNClob(parameterIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def setSQLXML(parameterIndex: Int, xmlObject: SQLXML): Unit = ()
-
- override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int, scaleOrLength: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Long): Unit = ()
-
- override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Long): Unit = ()
-
- override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def setAsciiStream(parameterIndex: Int, x: InputStream): Unit = ()
-
- override def setBinaryStream(parameterIndex: Int, x: InputStream): Unit = ()
-
- override def setCharacterStream(parameterIndex: Int, reader: Reader): Unit = ()
-
- override def setNCharacterStream(parameterIndex: Int, value: Reader): Unit = ()
-
- override def setClob(parameterIndex: Int, reader: Reader): Unit = ()
-
- override def setBlob(parameterIndex: Int, inputStream: InputStream): Unit = ()
-
- override def setNClob(parameterIndex: Int, reader: Reader): Unit = ()
-
- override def executeQuery(sql: String): ResultSet = ???
-
- override def executeUpdate(sql: String): Int = 0
-
- override def close(): Unit = ()
-
- override def getMaxFieldSize: Int = 0
-
- override def setMaxFieldSize(max: Int): Unit = ()
-
- override def getMaxRows: Int = 0
-
- override def setMaxRows(max: Int): Unit = ()
-
- override def setEscapeProcessing(enable: Boolean): Unit = ()
-
- override def getQueryTimeout: Int = 0
-
- override def setQueryTimeout(seconds: Int): Unit = ()
-
- override def cancel(): Unit = ()
-
- override def getWarnings: SQLWarning = null
-
- override def clearWarnings(): Unit = ()
-
- override def setCursorName(name: String): Unit = ()
-
- override def execute(sql: String): Boolean = {
- this.resultSet = new FlinkJdbcResultSet(ETLFlinkSession.batchEnv.sqlQuery(buildSql), this)
- true
- }
-
- override def getResultSet: ResultSet = new FlinkJdbcResultSet(ETLFlinkSession.batchEnv.sqlQuery(buildSql), this)
-
- override def getUpdateCount: Int = -1
-
- override def getMoreResults: Boolean = !this.resultSet.alreadyTheLast()
-
- override def setFetchDirection(direction: Int): Unit = ()
-
- override def getFetchDirection: Int = 0
-
- override def setFetchSize(rows: Int): Unit = ()
-
- override def getFetchSize: Int = 0
-
- override def getResultSetConcurrency: Int = 0
-
- override def getResultSetType: Int = 0
-
- override def addBatch(sql: String): Unit = ()
-
- override def clearBatch(): Unit = ()
-
- override def executeBatch(): scala.Array[Int] = ???
-
- override def getConnection: Connection = ???
-
- override def getMoreResults(current: Int): Boolean = !this.resultSet.alreadyTheLast()
-
- override def getGeneratedKeys: ResultSet = ???
-
- override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = {
- ETLFlinkSession.batchEnv.executeSql(sql)
- 0
- }
-
- override def executeUpdate(sql: String, columnIndexes: scala.Array[Int]): Int = {
- ETLFlinkSession.batchEnv.executeSql(sql)
- 0
- }
-
- override def executeUpdate(sql: String, columnNames: scala.Array[String]): Int = {
- ETLFlinkSession.batchEnv.executeSql(sql)
- 0
- }
-
- override def execute(sql: String, autoGeneratedKeys: Int): Boolean = {
- ETLFlinkSession.batchEnv.executeSql(sql)
- true
- }
-
- override def execute(sql: String, columnIndexes: scala.Array[Int]): Boolean = {
- ETLFlinkSession.batchEnv.executeSql(sql)
- true
- }
-
- override def execute(sql: String, columnNames: scala.Array[String]): Boolean = {
- ETLFlinkSession.batchEnv.executeSql(sql)
- true
- }
-
- override def getResultSetHoldability: Int = 0
-
- override def isClosed: Boolean = false
-
- override def setPoolable(poolable: Boolean): Unit = ()
-
- override def isPoolable: Boolean = false
-
- override def closeOnCompletion(): Unit = ()
-
- override def isCloseOnCompletion: Boolean = false
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = false
-}
-
-// scalastyle:on
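
buildSql above substitutes ? placeholders positionally with rendered literals rather than binding real parameters. Illustration with a made-up query and values:

  val ps = new FlinkJdbcPreparedStatement(
    "SELECT * FROM job_log WHERE job_name = ? AND job_id > ?")
  ps.setString(1, "wf_orders")
  ps.setInt(2, 42)
  ps.buildSql // SELECT * FROM job_log WHERE job_name = 'wf_orders' AND job_id > 42
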
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcResultSet.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcResultSet.scala
deleted file mode 100644
index 18691f1..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcResultSet.scala
+++ /dev/null
@@ -1,439 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.driver
-
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import org.apache.flink.types.Row
-
-import java.io.{InputStream, Reader}
-import java.net.URL
-import java.{sql, util}
-import java.sql.{Blob, Clob, Date, NClob, Ref, ResultSet, ResultSetMetaData, RowId, SQLWarning, SQLXML, Statement, Time, Timestamp}
-import java.time.LocalDateTime
-import java.util.Calendar
-import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable`
-
-// scalastyle:off
-class FlinkJdbcResultSet(val data: DataFrame, val statement: Statement) extends ResultSet {
-
-  // Eagerly executes the query; `rows` iterates over the collected results.
-  private val rows = data.execute().collect()
-  private var currentData: Row = null
-
-  private val fields = data.getResolvedSchema.getColumns.map(_.getName).toList
-
-  def alreadyTheLast(): Boolean = (currentData == null) || !rows.hasNext
-
-  override def next(): Boolean = {
-    val hasNext = rows.hasNext
-    if (hasNext) {
-      currentData = rows.next()
-    }
-    hasNext
-  }
-
- override def close(): Unit = ()
-
- override def wasNull(): Boolean = false
-
- override def getString(columnIndex: Int): String = currentData.getField(columnIndex - 1).toString
-
- override def getBoolean(columnIndex: Int): Boolean = currentData.getField(columnIndex - 1).asInstanceOf[Boolean]
-
- override def getByte(columnIndex: Int): Byte = currentData.getField(columnIndex - 1).asInstanceOf[Byte]
-
- override def getShort(columnIndex: Int): Short = currentData.getField(columnIndex - 1).asInstanceOf[Short]
-
- override def getInt(columnIndex: Int): Int = {
- try {
- currentData.getField(columnIndex - 1).asInstanceOf[Int]
- } catch {
- case _: Exception => 0
- }
- }
-
- override def getLong(columnIndex: Int): Long = currentData.getField(columnIndex - 1).asInstanceOf[Long]
-
- override def getFloat(columnIndex: Int): Float = currentData.getField(columnIndex - 1).asInstanceOf[Float]
-
- override def getDouble(columnIndex: Int): Double = currentData.getField(columnIndex - 1).asInstanceOf[Double]
-
- override def getBigDecimal(columnIndex: Int, scale: Int): java.math.BigDecimal = currentData.getField(columnIndex - 1).asInstanceOf[java.math.BigDecimal]
-
- override def getBytes(columnIndex: Int): Array[Byte] = Array(currentData.getField(columnIndex - 1).asInstanceOf[Byte])
-
- override def getDate(columnIndex: Int): Date = currentData.getField(columnIndex - 1).asInstanceOf[Date]
-
-  override def getTime(columnIndex: Int): Time = ???
-
- override def getTimestamp(columnIndex: Int): Timestamp = currentData.getField(columnIndex - 1) match {
- case ldt: LocalDateTime => Timestamp.valueOf(ldt)
- case _ => currentData.getField(columnIndex - 1).asInstanceOf[Timestamp]
- }
-
- override def getAsciiStream(columnIndex: Int): InputStream = ???
-
- override def getUnicodeStream(columnIndex: Int): InputStream = ???
-
- override def getBinaryStream(columnIndex: Int): InputStream = ???
-
- override def getString(columnLabel: String): String = currentData.getField(columnLabel).toString
-
- override def getBoolean(columnLabel: String): Boolean = currentData.getField(columnLabel).asInstanceOf[Boolean]
-
- override def getByte(columnLabel: String): Byte = currentData.getField(columnLabel).asInstanceOf[Byte]
-
- override def getShort(columnLabel: String): Short = currentData.getField(columnLabel).asInstanceOf[Short]
-
- override def getInt(columnLabel: String): Int = currentData.getField(columnLabel).asInstanceOf[Int]
-
- override def getLong(columnLabel: String): Long = currentData.getField(columnLabel).asInstanceOf[Long]
-
- override def getFloat(columnLabel: String): Float = currentData.getField(columnLabel).asInstanceOf[Float]
-
- override def getDouble(columnLabel: String): Double = currentData.getField(columnLabel).asInstanceOf[Double]
-
- override def getBigDecimal(columnLabel: String, scale: Int): java.math.BigDecimal = currentData.getField(columnLabel).asInstanceOf[java.math.BigDecimal]
-
- override def getBytes(columnLabel: String): Array[Byte] = currentData.getField(columnLabel).asInstanceOf[Array[Byte]]
-
- override def getDate(columnLabel: String): Date = currentData.getField(columnLabel).asInstanceOf[Date]
-
- override def getTime(columnLabel: String): Time = currentData.getField(columnLabel).asInstanceOf[Time]
-
- override def getTimestamp(columnLabel: String): Timestamp = currentData.getField(columnLabel).asInstanceOf[Timestamp]
-
- override def getAsciiStream(columnLabel: String): InputStream = ???
-
- override def getUnicodeStream(columnLabel: String): InputStream = ???
-
- override def getBinaryStream(columnLabel: String): InputStream = ???
-
- override def getWarnings: SQLWarning = null
-
- override def clearWarnings(): Unit = ()
-
- override def getCursorName: String = ""
-
- override def getMetaData: ResultSetMetaData = new FlinkJdbcResultSetMetaData(data)
-
- override def getObject(columnIndex: Int): AnyRef = null
-
- override def getObject(columnLabel: String): AnyRef = null
-
- override def findColumn(columnLabel: String): Int = fields.indexOf(columnLabel)
-
- override def getCharacterStream(columnIndex: Int): Reader = ???
-
- override def getCharacterStream(columnLabel: String): Reader = ???
-
- override def getBigDecimal(columnIndex: Int): java.math.BigDecimal = currentData.getField(columnIndex - 1).asInstanceOf[java.math.BigDecimal]
-
- override def getBigDecimal(columnLabel: String): java.math.BigDecimal = currentData.getField(columnLabel).asInstanceOf[java.math.BigDecimal]
-
- override def isBeforeFirst: Boolean = false
-
- override def isAfterLast: Boolean = false
-
- override def isFirst: Boolean = false
-
- override def isLast: Boolean = false
-
- override def beforeFirst(): Unit = ()
-
- override def afterLast(): Unit = ()
-
- override def first(): Boolean = false
-
- override def last(): Boolean = false
-
- override def getRow: Int = 0
-
- override def absolute(row: Int): Boolean = false
-
- override def relative(rows: Int): Boolean = false
-
- override def previous(): Boolean = false
-
- override def setFetchDirection(direction: Int): Unit = ()
-
- override def getFetchDirection: Int = 0
-
- override def setFetchSize(rows: Int): Unit = ()
-
- override def getFetchSize: Int = 0
-
- override def getType: Int = 0
-
- override def getConcurrency: Int = 0
-
- override def rowUpdated(): Boolean = ???
-
- override def rowInserted(): Boolean = ???
-
- override def rowDeleted(): Boolean = ???
-
- override def updateNull(columnIndex: Int): Unit = ()
-
- override def updateBoolean(columnIndex: Int, x: Boolean): Unit = ()
-
- override def updateByte(columnIndex: Int, x: Byte): Unit = ()
-
- override def updateShort(columnIndex: Int, x: Short): Unit = ()
-
- override def updateInt(columnIndex: Int, x: Int): Unit = ()
-
- override def updateLong(columnIndex: Int, x: Long): Unit = ()
-
- override def updateFloat(columnIndex: Int, x: Float): Unit = ()
-
- override def updateDouble(columnIndex: Int, x: Double): Unit = ()
-
- override def updateBigDecimal(columnIndex: Int, x: java.math.BigDecimal): Unit = ()
-
- override def updateString(columnIndex: Int, x: String): Unit = ()
-
- override def updateBytes(columnIndex: Int, x: Array[Byte]): Unit = ()
-
- override def updateTime(columnIndex: Int, x: Time): Unit = ()
-
- override def updateTimestamp(columnIndex: Int, x: Timestamp): Unit = ()
-
- override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Int): Unit = ()
-
- override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Int): Unit = ()
-
- override def updateCharacterStream(columnIndex: Int, x: Reader, length: Int): Unit = ()
-
- override def updateObject(columnIndex: Int, x: Any, scaleOrLength: Int): Unit = ()
-
- override def updateObject(columnIndex: Int, x: Any): Unit = ()
-
- override def updateNull(columnLabel: String): Unit = ()
-
- override def updateBoolean(columnLabel: String, x: Boolean): Unit = ()
-
- override def updateByte(columnLabel: String, x: Byte): Unit = ()
-
- override def updateShort(columnLabel: String, x: Short): Unit = ()
-
- override def updateInt(columnLabel: String, x: Int): Unit = ()
-
- override def updateLong(columnLabel: String, x: Long): Unit = ()
-
- override def updateFloat(columnLabel: String, x: Float): Unit = ()
-
- override def updateDouble(columnLabel: String, x: Double): Unit = ()
-
- override def updateBigDecimal(columnLabel: String, x: java.math.BigDecimal): Unit = ()
-
- override def updateString(columnLabel: String, x: String): Unit = ()
-
- override def updateBytes(columnLabel: String, x: Array[Byte]): Unit = ()
-
- override def updateTime(columnLabel: String, x: Time): Unit = ()
-
- override def updateTimestamp(columnLabel: String, x: Timestamp): Unit = ()
-
- override def updateAsciiStream(columnLabel: String, x: InputStream, length: Int): Unit = ()
-
- override def updateBinaryStream(columnLabel: String, x: InputStream, length: Int): Unit = ()
-
- override def updateCharacterStream(columnLabel: String, reader: Reader, length: Int): Unit = ()
-
- override def updateObject(columnLabel: String, x: Any, scaleOrLength: Int): Unit = ()
-
- override def updateObject(columnLabel: String, x: Any): Unit = ()
-
- override def insertRow(): Unit = ()
-
- override def updateRow(): Unit = ()
-
- override def deleteRow(): Unit = ()
-
- override def refreshRow(): Unit = ()
-
- override def cancelRowUpdates(): Unit = ()
-
- override def moveToInsertRow(): Unit = ()
-
- override def moveToCurrentRow(): Unit = ()
-
- override def getStatement: Statement = statement
-
- override def getObject(columnIndex: Int, map: util.Map[String, Class[_]]): AnyRef = null
-
- override def getRef(columnIndex: Int): Ref = null
-
- override def getBlob(columnIndex: Int): Blob = ???
-
- override def getClob(columnIndex: Int): Clob = ???
-
- override def getArray(columnIndex: Int): sql.Array = ???
-
- override def getObject(columnLabel: String, map: util.Map[String, Class[_]]): AnyRef = null
-
- override def getRef(columnLabel: String): Ref = null
-
- override def getBlob(columnLabel: String): Blob = ???
-
- override def getClob(columnLabel: String): Clob = ???
-
- override def getArray(columnLabel: String): sql.Array = ???
-
- override def getDate(columnIndex: Int, cal: Calendar): Date = ???
-
- override def getDate(columnLabel: String, cal: Calendar): Date = ???
-
- override def getTime(columnIndex: Int, cal: Calendar): Time = ???
-
- override def getTime(columnLabel: String, cal: Calendar): Time = ???
-
- override def getTimestamp(columnIndex: Int, cal: Calendar): Timestamp = ???
-
- override def getTimestamp(columnLabel: String, cal: Calendar): Timestamp = ???
-
- override def getURL(columnIndex: Int): URL = ???
-
- override def getURL(columnLabel: String): URL = ???
-
- override def updateRef(columnIndex: Int, x: Ref): Unit = ()
-
- override def updateRef(columnLabel: String, x: Ref): Unit = ()
-
- override def updateBlob(columnIndex: Int, x: Blob): Unit = ()
-
- override def updateBlob(columnLabel: String, x: Blob): Unit = ()
-
- override def updateClob(columnIndex: Int, x: Clob): Unit = ()
-
- override def updateClob(columnLabel: String, x: Clob): Unit = ()
-
- override def updateArray(columnIndex: Int, x: sql.Array): Unit = ()
-
- override def updateArray(columnLabel: String, x: sql.Array): Unit = ()
-
- override def getRowId(columnIndex: Int): RowId = ???
-
- override def getRowId(columnLabel: String): RowId = ???
-
- override def updateRowId(columnIndex: Int, x: RowId): Unit = ()
-
- override def updateRowId(columnLabel: String, x: RowId): Unit = ()
-
- override def getHoldability: Int = 0
-
- override def isClosed: Boolean = false
-
- override def updateNString(columnIndex: Int, nString: String): Unit = ()
-
- override def updateNString(columnLabel: String, nString: String): Unit = ()
-
- override def updateNClob(columnIndex: Int, nClob: NClob): Unit = ()
-
- override def updateNClob(columnLabel: String, nClob: NClob): Unit = ()
-
- override def getNClob(columnIndex: Int): NClob = ???
-
- override def getNClob(columnLabel: String): NClob = ???
-
- override def getSQLXML(columnIndex: Int): SQLXML = ???
-
- override def getSQLXML(columnLabel: String): SQLXML = ???
-
- override def updateSQLXML(columnIndex: Int, xmlObject: SQLXML): Unit = ()
-
- override def updateSQLXML(columnLabel: String, xmlObject: SQLXML): Unit = ()
-
- override def getNString(columnIndex: Int): String = ???
-
- override def getNString(columnLabel: String): String = ???
-
- override def getNCharacterStream(columnIndex: Int): Reader = ???
-
- override def getNCharacterStream(columnLabel: String): Reader = ???
-
- override def updateNCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = ()
-
- override def updateNCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = ()
-
- override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Long): Unit = ()
-
- override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Long): Unit = ()
-
- override def updateCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = ()
-
- override def updateAsciiStream(columnLabel: String, x: InputStream, length: Long): Unit = ()
-
- override def updateBinaryStream(columnLabel: String, x: InputStream, length: Long): Unit = ()
-
- override def updateCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = ()
-
- override def updateBlob(columnIndex: Int, inputStream: InputStream, length: Long): Unit = ()
-
- override def updateBlob(columnLabel: String, inputStream: InputStream, length: Long): Unit = ()
-
- override def updateClob(columnIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def updateClob(columnLabel: String, reader: Reader, length: Long): Unit = ()
-
- override def updateNClob(columnIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def updateNClob(columnLabel: String, reader: Reader, length: Long): Unit = ()
-
- override def updateNCharacterStream(columnIndex: Int, x: Reader): Unit = ()
-
- override def updateNCharacterStream(columnLabel: String, reader: Reader): Unit = ()
-
- override def updateAsciiStream(columnIndex: Int, x: InputStream): Unit = ()
-
- override def updateBinaryStream(columnIndex: Int, x: InputStream): Unit = ()
-
- override def updateCharacterStream(columnIndex: Int, x: Reader): Unit = ()
-
- override def updateAsciiStream(columnLabel: String, x: InputStream): Unit = ()
-
- override def updateBinaryStream(columnLabel: String, x: InputStream): Unit = ()
-
- override def updateCharacterStream(columnLabel: String, reader: Reader): Unit = ()
-
- override def updateBlob(columnIndex: Int, inputStream: InputStream): Unit = ()
-
- override def updateBlob(columnLabel: String, inputStream: InputStream): Unit = ()
-
- override def updateClob(columnIndex: Int, reader: Reader): Unit = ()
-
- override def updateClob(columnLabel: String, reader: Reader): Unit = ()
-
- override def updateNClob(columnIndex: Int, reader: Reader): Unit = ()
-
- override def updateNClob(columnLabel: String, reader: Reader): Unit = ()
-
- override def getObject[T](columnIndex: Int, `type`: Class[T]): T = ???
-
-  override def getObject[T](columnLabel: String, `type`: Class[T]): T = {
-    val value: Any = currentData.getField(columnLabel)
-
-    // Flink hands TIMESTAMP columns back as LocalDateTime, so convert in both
-    // directions depending on which representation the caller asked for.
-    val converted: Any = value match {
-      case ldt: LocalDateTime if `type` == classOf[Timestamp] => Timestamp.valueOf(ldt)
-      case ts: Timestamp if `type` == classOf[LocalDateTime] => ts.toLocalDateTime
-      case other => other
-    }
-    converted.asInstanceOf[T]
-  }
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = ???
-
- override def updateDate(columnIndex: Int, x: Date): Unit = ()
-
- override def updateDate(columnLabel: String, x: Date): Unit = ()
-}
-
-// scalastyle:on
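
The index arithmetic above is the crux of this adapter: JDBC columns are 1-based while Flink's Row fields are 0-based, hence every columnIndex - 1. Typical consumption, assuming stmt is any Statement backed by this driver:

  val rs = stmt.executeQuery("SELECT id, name FROM job_log")
  while (rs.next()) {               // advances currentData through the collected rows
    val id   = rs.getLong(1)        // reads row.getField(0)
    val name = rs.getString("name") // by-label access goes through Row.getField(String)
  }
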
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcResultSetMetaData.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcResultSetMetaData.scala
deleted file mode 100644
index a1ed11a..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcResultSetMetaData.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.driver
-
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-
-import java.sql.ResultSetMetaData
-import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable`
-
-// scalastyle:off
-class FlinkJdbcResultSetMetaData(val data: DataFrame) extends ResultSetMetaData {
- override def getColumnCount: Int = getFields.size
-
- private def getFields = {
- data.getResolvedSchema.getColumns.map(_.getName).toList
- }
-
- override def isAutoIncrement(column: Int): Boolean = false
-
- override def isCaseSensitive(column: Int): Boolean = false
-
- override def isSearchable(column: Int): Boolean = true
-
- override def isCurrency(column: Int): Boolean = false
-
- override def isNullable(column: Int): Int = 0
-
- override def isSigned(column: Int): Boolean = false
-
- override def getColumnDisplaySize(column: Int): Int = getFields(column - 1).length
-
- override def getColumnLabel(column: Int): String = getFields(column - 1)
-
- override def getColumnName(column: Int): String = getFields(column - 1)
-
- override def getSchemaName(column: Int): String = getFields(column - 1)
-
- override def getPrecision(column: Int): Int = 0
-
- override def getScale(column: Int): Int = 0
-
- override def getTableName(column: Int): String = ""
-
- override def getCatalogName(column: Int): String = ""
-
- override def getColumnType(column: Int): Int = 0
-
- override def getColumnTypeName(column: Int): String = data.getResolvedSchema.getColumns.map(_.getDataType.toString).toList(column - 1)
-
- override def isReadOnly(column: Int): Boolean = false
-
- override def isWritable(column: Int): Boolean = true
-
- override def isDefinitelyWritable(column: Int): Boolean = true
-
- override def getColumnClassName(column: Int): String = ""
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = ???
-}
-
-// scalastyle:on
\ No newline at end of file
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcStatement.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcStatement.scala
deleted file mode 100644
index 7dc33e8..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/driver/FlinkJdbcStatement.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.driver
-
-import com.github.sharpdata.sharpetl.flink.extra.driver.FlinkJdbcStatement.fixedResult
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession
-import org.apache.flink.table.api.{DataTypes, Table}
-import org.apache.flink.table.api.Expressions.row
-
-import java.sql.{Connection, ResultSet, SQLWarning, Statement}
-
-// scalastyle:off
-class FlinkJdbcStatement extends Statement {
-
- private var resultSet: FlinkJdbcResultSet = null
-
- override def executeQuery(sql: String): ResultSet = {
- println(s"[DRIVER] exscuting sql $sql")
- new FlinkJdbcResultSet(ETLFlinkSession.batchEnv.sqlQuery(sql), this)
- }
-
- override def executeUpdate(sql: String): Int = {
- println(s"[DRIVER] exscuting sql $sql")
- ETLFlinkSession.batchEnv.executeSql(sql)
- 0
- }
-
- override def close(): Unit = ()
-
- override def getMaxFieldSize: Int = 0
-
- override def setMaxFieldSize(max: Int): Unit = ()
-
- override def getMaxRows: Int = 0
-
- override def setMaxRows(max: Int): Unit = ()
-
- override def setEscapeProcessing(enable: Boolean): Unit = ()
-
- override def getQueryTimeout: Int = 0
-
- override def setQueryTimeout(seconds: Int): Unit = ()
-
- override def cancel(): Unit = ()
-
- override def getWarnings: SQLWarning = null
-
- override def clearWarnings(): Unit = ()
-
- override def setCursorName(name: String): Unit = ()
-
- override def execute(sql: String): Boolean = {
- ETLFlinkSession.batchEnv.executeSql(sql).print()
-
- this.resultSet = new FlinkJdbcResultSet(fixedResult, this)
- false
- }
-
- override def getResultSet: ResultSet = resultSet
-
- override def getUpdateCount: Int = -1
-
- override def getMoreResults: Boolean = !this.resultSet.alreadyTheLast()
-
- override def setFetchDirection(direction: Int): Unit = ()
-
- override def getFetchDirection: Int = 0
-
- override def setFetchSize(rows: Int): Unit = ()
-
- override def getFetchSize: Int = 0
-
- override def getResultSetConcurrency: Int = 0
-
- override def getResultSetType: Int = 0
-
- override def addBatch(sql: String): Unit = ()
-
- override def clearBatch(): Unit = ()
-
- override def executeBatch(): Array[Int] = Array.empty
-
- override def getConnection: Connection = new FlinkJdbcConnection()
-
- override def getMoreResults(current: Int): Boolean = !this.resultSet.alreadyTheLast()
-
- override def getGeneratedKeys: ResultSet = null
-
- override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = 0
-
- override def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = 0
-
- override def executeUpdate(sql: String, columnNames: Array[String]): Int = 0
-
- override def execute(sql: String, autoGeneratedKeys: Int): Boolean = false
-
- override def execute(sql: String, columnIndexes: Array[Int]): Boolean = false
-
- override def execute(sql: String, columnNames: Array[String]): Boolean = false
-
- override def getResultSetHoldability: Int = 0
-
- override def isClosed: Boolean = false
-
- override def setPoolable(poolable: Boolean): Unit = ()
-
- override def isPoolable: Boolean = false
-
- override def closeOnCompletion(): Unit = ()
-
- override def isCloseOnCompletion: Boolean = false
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = false
-}
-
-object FlinkJdbcStatement {
- val fixedResult: Table = ETLFlinkSession.batchEnv.fromValues(
- DataTypes.ROW(
- DataTypes.FIELD("result", DataTypes.STRING())
- ),
- row("SUCCESS")
- )
-}
-
-// scalastyle:on
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveConnection.java b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveConnection.java
deleted file mode 100644
index 2681e09..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveConnection.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.flyway.hive;
-
-import org.flywaydb.core.internal.database.base.Connection;
-import org.flywaydb.core.internal.database.base.Schema;
-
-import java.sql.SQLException;
-
-public class HiveConnection extends Connection<HiveDatabase> {
- protected HiveConnection(HiveDatabase database, java.sql.Connection connection) {
- super(database, connection);
- }
-
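- // Hive exposes no search path; the current database serves as the schema name.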
- @Override
- protected String getCurrentSchemaNameOrSearchPath() throws SQLException {
- return jdbcTemplate.queryForString("SELECT current_database()");
- }
-
- @Override
- public Schema getSchema(String name) {
- return new HiveSchema(this.getJdbcTemplate(), this.database, name);
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveDatabase.java b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveDatabase.java
deleted file mode 100644
index d383500..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveDatabase.java
+++ /dev/null
@@ -1,145 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.flyway.hive;
-
-import com.github.sharpdata.sharpetl.core.util.ETLConfig;
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession;
-import org.flywaydb.core.api.CoreMigrationType;
-import org.flywaydb.core.api.configuration.Configuration;
-import org.flywaydb.core.internal.database.base.Database;
-import org.flywaydb.core.internal.database.base.Table;
-import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
-import org.flywaydb.core.internal.jdbc.StatementInterceptor;
-import org.flywaydb.core.internal.util.AbbreviationUtils;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-
-public class HiveDatabase extends Database<HiveConnection> {
- public HiveDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) {
- super(configuration, jdbcConnectionFactory, statementInterceptor);
- }
-
- @Override
- protected String doGetCatalog() throws SQLException {
- return ETLConfig.getProperty("flyway.catalog");
- }
-
- private String doGetDatabase() {
- return ETLConfig.getProperty("flyway.database");
- }
-
- @Override
- protected HiveConnection doGetConnection(Connection connection) {
- return new HiveConnection(this, connection);
- }
-
- @Override
- public void ensureSupported() {
-
- }
-
- @Override
- public boolean supportsDdlTransactions() {
- return false;
- }
-
- @Override
- public String getBooleanTrue() {
- return "true";
- }
-
- @Override
- public String getBooleanFalse() {
- return "false";
- }
-
- @Override
- public String doQuote(String identifier) {
- return "`" + identifier + "`";
- }
-
- @Override
- protected String getOpenQuote() {
- return "`";
- }
-
- @Override
- protected String getCloseQuote() {
- return "`";
- }
-
- @Override
- public String getEscapedQuote() {
- return "\\`";
- }
-
- @Override
- public boolean catalogIsSchema() {
- return true;
- }
-
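- // DDL for Flyway's schema history table, expressed in Flink SQL; creating
- // the target database first happens as a side effect of fetching this script.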
- @Override
- public String getRawCreateScript(Table table, boolean baseline) {
- ETLFlinkSession.batchEnv().executeSql("create database if not exists " + doGetDatabase() + ";");
- return "CREATE TABLE " + table + " (\n" +
- " `installed_rank` INT,\n" +
- " `version` STRING,\n" +
- " `description` STRING,\n" +
- " `type` STRING,\n" +
- " `script` STRING,\n" +
- " `checksum` INT,\n" +
- " `installed_by` STRING,\n" +
- " `installed_on` TIMESTAMP,\n" +
- " `execution_time` INT,\n" +
- " `success` BOOLEAN\n" +
- ");\n" + baselineStatement(table) + ";\n";
- }
-
- @Override
- public String getSelectStatement(Table table) {
- return "SELECT " + quote("installed_rank")
- + "," + quote("version")
- + "," + quote("description")
- + "," + quote("type")
- + "," + quote("script")
- + "," + quote("checksum")
- + "," + quote("installed_on")
- + "," + quote("installed_by")
- + "," + quote("execution_time")
- + "," + quote("success")
- + " FROM " + table
- + " WHERE " + quote("installed_rank") + " > ?"
- + " ORDER BY " + quote("installed_rank");
- }
-
- @Override
- public String getInsertStatement(Table table) {
- // Explicitly set installed_on to NOW().
- return "INSERT INTO " + table
- + " (" + quote("installed_rank")
- + ", " + quote("version")
- + ", " + quote("description")
- + ", " + quote("type")
- + ", " + quote("script")
- + ", " + quote("checksum")
- + ", " + quote("installed_by")
- + ", " + quote("installed_on")
- + ", " + quote("execution_time")
- + ", " + quote("success")
- + ")"
- + " VALUES (?, ?, ?, ?, ?, ?, ?, NOW(), ?, ?)";
- }
-
- public String baselineStatement(Table table) {
- return String.format(getInsertStatement(table).replace("?", "%s"),
- 1,
- "'0'",
- "'" + AbbreviationUtils.abbreviateDescription(configuration.getBaselineDescription()) + "'",
- "'" + CoreMigrationType.BASELINE + "'",
- "'" + AbbreviationUtils.abbreviateScript(configuration.getBaselineDescription()) + "'",
- "0",
- "'" + getInstalledBy() + "'",
- 0,
- getBooleanTrue()
- );
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveDatabaseType.java b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveDatabaseType.java
deleted file mode 100644
index 8a998db..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveDatabaseType.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.flyway.hive;
-
-import org.flywaydb.core.api.ResourceProvider;
-import org.flywaydb.core.api.configuration.Configuration;
-import org.flywaydb.core.internal.database.base.BaseDatabaseType;
-import org.flywaydb.core.internal.database.base.Database;
-import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
-import org.flywaydb.core.internal.jdbc.StatementInterceptor;
-import org.flywaydb.core.internal.parser.Parser;
-import org.flywaydb.core.internal.parser.ParsingContext;
-
-import java.sql.Connection;
-import java.sql.Types;
-
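-// Registers a custom Flyway database type, matched by the synthetic
-// "jdbc:flink_sharp_etl:" URL prefix rather than a real Hive/Flink JDBC URL.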
-public class HiveDatabaseType extends BaseDatabaseType {
- @Override
- public String getName() {
- return "flink_sharp_etl";
- }
-
- @Override
- public int getNullType() {
- return Types.VARCHAR;
- }
-
- @Override
- public boolean handlesJDBCUrl(String url) {
- return url.startsWith("jdbc:flink_sharp_etl:");
- }
-
- @Override
- public String getDriverClass(String url, ClassLoader classLoader) {
- return "com.github.sharpdata.sharpetl.flink.extra.driver.FlinkJdbcDriver";
- }
-
- @Override
- public boolean handlesDatabaseProductNameAndVersion(String databaseProductName, String databaseProductVersion, Connection connection) {
- return databaseProductName.startsWith("flink_sharp_etl");
- }
-
- @Override
- public Database createDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) {
- return new HiveDatabase(configuration, jdbcConnectionFactory, statementInterceptor);
- }
-
- @Override
- public Parser createParser(Configuration configuration, ResourceProvider resourceProvider, ParsingContext parsingContext) {
- return new HiveParser(configuration, parsingContext, 8);
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveParser.java b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveParser.java
deleted file mode 100644
index e98f212..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveParser.java
+++ /dev/null
@@ -1,184 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.flyway.hive;
-
-import org.flywaydb.core.api.configuration.Configuration;
-import org.flywaydb.core.api.resource.Resource;
-import org.flywaydb.core.internal.parser.*;
-import org.flywaydb.core.internal.sqlscript.Delimiter;
-import org.flywaydb.core.internal.sqlscript.ParsedSqlStatement;
-import org.flywaydb.core.internal.sqlscript.SqlStatement;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Set;
-import java.util.regex.Pattern;
-
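-// Every override below delegates to the base Parser; the subclass exists as
-// the hook HiveDatabaseType.createParser requires, and as a place to
-// customize Hive SQL parsing if it is ever needed.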
-public class HiveParser extends Parser {
- protected HiveParser(Configuration configuration, ParsingContext parsingContext, int peekDepth) {
- super(configuration, parsingContext, peekDepth);
- }
-
- @Override
- protected Delimiter getDefaultDelimiter() {
- return super.getDefaultDelimiter();
- }
-
- @Override
- protected char getIdentifierQuote() {
- return super.getIdentifierQuote();
- }
-
- @Override
- protected char getAlternativeIdentifierQuote() {
- return super.getAlternativeIdentifierQuote();
- }
-
- @Override
- protected char getAlternativeStringLiteralQuote() {
- return super.getAlternativeStringLiteralQuote();
- }
-
- @Override
- protected char getOpeningIdentifierSymbol() {
- return super.getOpeningIdentifierSymbol();
- }
-
- @Override
- protected char getClosingIdentifierSymbol() {
- return super.getClosingIdentifierSymbol();
- }
-
- @Override
- protected Set<String> getValidKeywords() {
- return super.getValidKeywords();
- }
-
- @Override
- protected boolean supportsPeekingMultipleLines() {
- return super.supportsPeekingMultipleLines();
- }
-
- @Override
- protected SqlStatement getNextStatement(Resource resource, PeekingReader reader, Recorder recorder, PositionTracker tracker, ParserContext context) {
- return super.getNextStatement(resource, reader, recorder, tracker, context);
- }
-
- @Override
- protected boolean shouldAdjustBlockDepth(ParserContext context, List<Token> tokens, Token token) {
- return super.shouldAdjustBlockDepth(context, tokens, token);
- }
-
- @Override
- protected boolean shouldDiscard(Token token, boolean nonCommentPartSeen) {
- return super.shouldDiscard(token, nonCommentPartSeen);
- }
-
- @Override
- protected void resetDelimiter(ParserContext context) {
- super.resetDelimiter(context);
- }
-
- @Override
- protected void adjustDelimiter(ParserContext context, StatementType statementType) {
- super.adjustDelimiter(context, statementType);
- }
-
- @Override
- protected int getTransactionalDetectionCutoff() {
- return super.getTransactionalDetectionCutoff();
- }
-
- @Override
- protected void adjustBlockDepth(ParserContext context, List<Token> tokens, Token keyword, PeekingReader reader) throws IOException {
- super.adjustBlockDepth(context, tokens, keyword, reader);
- }
-
- @Override
- protected int getLastKeywordIndex(List<Token> tokens) {
- return super.getLastKeywordIndex(tokens);
- }
-
- @Override
- protected int getLastKeywordIndex(List<Token> tokens, int endIndex) {
- return super.getLastKeywordIndex(tokens, endIndex);
- }
-
- @Override
- protected boolean doTokensMatchPattern(List<Token> previousTokens, Token current, Pattern regex) {
- return super.doTokensMatchPattern(previousTokens, current, regex);
- }
-
- @Override
- protected ParsedSqlStatement createStatement(PeekingReader reader, Recorder recorder, int statementPos, int statementLine, int statementCol, int nonCommentPartPos, int nonCommentPartLine, int nonCommentPartCol, StatementType statementType, boolean canExecuteInTransaction, Delimiter delimiter, String sql) throws IOException {
- return super.createStatement(reader, recorder, statementPos, statementLine, statementCol, nonCommentPartPos, nonCommentPartLine, nonCommentPartCol, statementType, canExecuteInTransaction, delimiter, sql);
- }
-
- @Override
- protected Boolean detectCanExecuteInTransaction(String simplifiedStatement, List<Token> keywords) {
- return super.detectCanExecuteInTransaction(simplifiedStatement, keywords);
- }
-
- @Override
- protected String readKeyword(PeekingReader reader, Delimiter delimiter, ParserContext context) throws IOException {
- return super.readKeyword(reader, delimiter, context);
- }
-
- @Override
- protected String readIdentifier(PeekingReader reader) throws IOException {
- return super.readIdentifier(reader);
- }
-
- @Override
- protected Token handleDelimiter(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
- return super.handleDelimiter(reader, context, pos, line, col);
- }
-
- @Override
- protected boolean isAlternativeStringLiteral(String peek) {
- return super.isAlternativeStringLiteral(peek);
- }
-
- @Override
- protected boolean isDelimiter(String peek, ParserContext context, int col, int colIgnoringWhitespace) {
- return super.isDelimiter(peek, context, col, colIgnoringWhitespace);
- }
-
- @Override
- protected boolean isLetter(char c, ParserContext context) {
- return super.isLetter(c, context);
- }
-
- @Override
- protected boolean isSingleLineComment(String peek, ParserContext context, int col) {
- return super.isSingleLineComment(peek, context, col);
- }
-
- @Override
- protected boolean isKeyword(String text) {
- return super.isKeyword(text);
- }
-
- @Override
- protected boolean isCommentDirective(String peek) {
- return super.isCommentDirective(peek);
- }
-
- @Override
- protected Token handleCommentDirective(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
- return super.handleCommentDirective(reader, context, pos, line, col);
- }
-
- @Override
- protected Token handleStringLiteral(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
- return super.handleStringLiteral(reader, context, pos, line, col);
- }
-
- @Override
- protected Token handleAlternativeStringLiteral(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
- return super.handleAlternativeStringLiteral(reader, context, pos, line, col);
- }
-
- @Override
- protected Token handleKeyword(PeekingReader reader, ParserContext context, int pos, int line, int col, String keyword) throws IOException {
- return super.handleKeyword(reader, context, pos, line, col, keyword);
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveSchema.java b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveSchema.java
deleted file mode 100644
index 5c162ec..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveSchema.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.flyway.hive;
-
-import com.github.sharpdata.sharpetl.core.util.ETLConfig;
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.catalog.Catalog;
-import org.flywaydb.core.internal.database.base.Schema;
-import org.flywaydb.core.internal.database.base.Table;
-import org.flywaydb.core.internal.jdbc.JdbcTemplate;
-
-import java.sql.SQLException;
-import java.util.List;
-import java.util.Optional;
-
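-// Maps Flyway's schema operations onto a Flink catalog/database pair
-// configured via the flyway.catalog and flyway.database properties.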
-public class HiveSchema extends Schema<HiveDatabase, HiveTable> {
- /**
- * @param jdbcTemplate The Jdbc Template for communicating with the DB.
- * @param database The database-specific support.
- * @param name The name of the schema.
- */
- public HiveSchema(JdbcTemplate jdbcTemplate, HiveDatabase database, String name) {
- super(jdbcTemplate, database, name);
- }
-
- @Override
- protected boolean doExists() throws SQLException {
- final TableEnvironment session = ETLFlinkSession.batchEnv();
- ETLFlinkSession.createCatalogIfNeed("flink_sharp_etl", session);
- final Optional<Catalog> catalog = session.getCatalog(ETLConfig.getProperty("flyway.catalog"));
- catalog.get(); // fails fast with NoSuchElementException when the catalog is absent
- return true;
- //return jdbcTemplate.queryForStringList("SHOW SCHEMAS").contains(name);
- }
-
- @Override
- protected boolean doEmpty() throws SQLException {
- return allTables().length == 0;
- }
-
- @Override
- protected void doCreate() throws SQLException {
- jdbcTemplate.execute("CREATE SCHEMA " + database.quote(name));
- }
-
- @Override
- protected void doDrop() throws SQLException {
- clean();
- jdbcTemplate.execute("DROP SCHEMA " + database.quote(name) + " RESTRICT");
- }
-
- @Override
- protected void doClean() throws SQLException {
- for (Table table : allTables())
- table.drop();
- }
-
- @Override
- protected HiveTable[] doAllTables() throws SQLException {
- final String[] tableNames = ETLFlinkSession.batchEnv().listTables(ETLConfig.getProperty("flyway.catalog"), ETLConfig.getProperty("flyway.database"));
-
- HiveTable[] tables = new HiveTable[tableNames.length];
- for (int i = 0; i < tableNames.length; i++) {
- tables[i] = new HiveTable(jdbcTemplate, database, this, tableNames[i]);
- }
- return tables;
- }
-
- @Override
- public HiveTable getTable(String tableName) {
- return new HiveTable(jdbcTemplate, database, this, tableName);
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveTable.java b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveTable.java
deleted file mode 100644
index 2fb409e..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/extra/flyway/hive/HiveTable.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.extra.flyway.hive;
-
-import com.github.sharpdata.sharpetl.core.util.ETLConfig;
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession;
-import org.flywaydb.core.internal.database.base.Table;
-import org.flywaydb.core.internal.jdbc.JdbcTemplate;
-
-import java.sql.SQLException;
-import java.util.Arrays;
-
-public class HiveTable extends Table<HiveDatabase, HiveSchema> {
- /**
- * @param jdbcTemplate The JDBC template for communicating with the DB.
- * @param database The database-specific support.
- * @param schema The schema this table lives in.
- * @param name The name of the table.
- */
- public HiveTable(JdbcTemplate jdbcTemplate, HiveDatabase database, HiveSchema schema, String name) {
- super(jdbcTemplate, database, schema, name);
- }
-
- @Override
- protected boolean doExists() throws SQLException {
- return Arrays.stream(ETLFlinkSession.batchEnv().listTables(ETLConfig.getProperty("flyway.catalog"), ETLConfig.getProperty("flyway.database"))).anyMatch(it -> it.contentEquals(name));
- }
-
- @Override
- protected void doLock() throws SQLException {
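- // No-op: this integration has no table-level locking to offer Flyway.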
-
- }
-
- @Override
- protected void doDrop() throws SQLException {
-
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/FlinkWorkflowInterpreter.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/FlinkWorkflowInterpreter.scala
deleted file mode 100644
index ec186a6..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/FlinkWorkflowInterpreter.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.job
-
-import com.github.sharpdata.sharpetl.core.api.{Variables, WorkflowInterpreter}
-import com.github.sharpdata.sharpetl.core.exception.Exception.IncrementalDiffModeTooMuchDataException
-import com.github.sharpdata.sharpetl.core.quality.QualityCheckRule
-import com.github.sharpdata.sharpetl.core.repository.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants._
-import com.github.sharpdata.sharpetl.core.util.DateUtil.{BigIntToLocalDateTime, L_YYYY_MM_DD_HH_MM_SS}
-import com.github.sharpdata.sharpetl.core.util.ETLConfig.{incrementalDiffModeDataLimit, partitionColumn}
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.core.util.StringUtil.BigIntConverter
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import com.github.sharpdata.sharpetl.flink.quality.FlinkQualityCheck
-import com.github.sharpdata.sharpetl.flink.util.ETLFlinkSession
-import org.apache.flink.table.api.TableEnvironment
-
-import scala.collection.convert.ImplicitConversions._
-import scala.collection.immutable
-import scala.jdk.CollectionConverters._
-import scala.util.control.NonFatal
-
-class FlinkWorkflowInterpreter(override val tEnv: TableEnvironment,
- override val dataQualityCheckRules: Map[String, QualityCheckRule],
- override val qualityCheckAccessor: QualityCheckAccessor)
- extends FlinkQualityCheck(tEnv, dataQualityCheckRules, qualityCheckAccessor) with WorkflowInterpreter[DataFrame] {
-
-
- override def evalSteps(steps: List[WorkflowStep], jobLog: JobLog, variables: Variables, start: String, end: String): Unit = {
- super.evalSteps(steps, jobLog, variables, start, end)
- }
-
- // deprecated method
- override def listFiles(step: WorkflowStep): List[String] = ???
-
- // deprecated method
- override def deleteSource(step: WorkflowStep): Unit = ???
-
- // deprecated method
- override def readFile(step: WorkflowStep,
- jobLog: JobLog,
- variables: Variables,
- files: List[String]): DataFrame = ???
-
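- // Writes one step's output and records source/target counts on the step log;
- // counts are obtained by collecting the whole result on the client.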
- override def executeWrite(jobLog: JobLog, df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- val stepLog = jobLog.getStepLog(step.step)
- val incrementalType = jobLog.logDrivenType
- ETLLogger.info(s"incremental type is ${incrementalType}")
- val dfCount = if (df == null) 0 else df.execute().collect().asScala.size // collect() materializes the result just to count it
- if (incrementalType == IncrementalType.DIFF && dfCount > incrementalDiffModeDataLimit.toLong) {
- throw IncrementalDiffModeTooMuchDataException(
- s"Incremental diff mode data limit is $incrementalDiffModeDataLimit, but current data count is ${dfCount}"
- )
- }
- if (incrementalType != IncrementalType.AUTO_INC_ID
- && incrementalType != IncrementalType.KAFKA_OFFSET
- && incrementalType != IncrementalType.UPSTREAM) {
- //`dataRangeStart` must be a datetime
- //value of partition column, we will use it later
- variables.put(s"$${$partitionColumn}", jobLog.dataRangeStart.asBigInt.asLocalDateTime().format(L_YYYY_MM_DD_HH_MM_SS))
- }
- if (df != null) {
- val count = if (step.target.dataSourceType == DataSourceType.VARIABLES) 0 else dfCount
- stepLog.targetCount = count
- ETLLogger.info("[Physical Plan]:")
- try {
- IO.write(df, step, variables)
- stepLog.successCount = count
- stepLog.failureCount = 0
- } catch {
- case e: Throwable =>
- stepLog.successCount = 0
- stepLog.failureCount = count
- throw e
- }
- }
- }
-
- // scalastyle:off
- override def executeRead(step: WorkflowStep,
- jobLog: JobLog,
- variables: Variables): DataFrame = {
- val stepLog = jobLog.getStepLog(step.step)
- val df = IO.read(tEnv, step, variables, jobLog)
- stepLog.sourceCount = if (step.target.dataSourceType == DataSourceType.VARIABLES) 0 else df.execute().collect().asScala.size
- df
- }
- // scalastyle:on
-
-
- /**
- * Release resources.
- */
- override def close(): Unit = {
- try {
- //ETLSparkSession.release(tEnv)
- } catch {
- case NonFatal(e) =>
- ETLLogger.error("Stop Spark session failed", e)
- }
- }
-
- override def applyConf(conf: Map[String, String]): Unit = {
- conf.foreach {
- case (key, value) =>
- ETLLogger.warn(s"Setting Flink conf $key=$value")
- tEnv.getConfig.set(key, value)
- }
- }
-
- override def applicationId(): String = ETLFlinkSession.wfName
-
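- // Runs a query and exposes each column of the result as a ${column}
- // substitution variable for later steps.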
- override def executeSqlToVariables(sql: String): List[Map[String, String]] = {
- val result = tEnv.sqlQuery(sql).execute()
- val schema = result.getResolvedSchema.getColumnNames
- val data: immutable.Seq[Map[String, String]] =
- result.collect().toList
- .map(it =>
- schema.asScala.map(col => (col, it.getField(col).toString)).toMap
- )
- data
- .map(
- it =>
- it.map {
- case (key, value) => ("${" + key + "}", value)
- }
- )
- .toList
- }
-
- override def union(left: DataFrame, right: DataFrame): DataFrame = {
- if (left != null && right != null) {
- left.union(right)
- } else if (left == null && right == null) {
- null // scalastyle:ignore
- } else if (left == null) {
- right
- } else {
- left
- }
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/IO.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/IO.scala
deleted file mode 100644
index 1776ec6..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/IO.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.job
-
-import com.github.sharpdata.sharpetl.core.annotation.AnnotationScanner
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.config.DataSourceConfig
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.exception.Exception.EmptyDataException
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants._
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import org.apache.flink.table.api.Expressions._
-import org.apache.flink.table.api.TableEnvironment
-
-
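-// Reflection-based dispatch: source and sink implementations are resolved
-// from the AnnotationScanner registries by dataSourceType and invoked
-// through their `read`/`write` methods.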
-object IO {
-
- def read(tEnv: TableEnvironment,
- step: WorkflowStep,
- variables: Variables,
- jobLog: JobLog): DataFrame = {
- val dataSourceConfig = step.getSourceConfig[DataSourceConfig]
-
- val value: Class[Source[_, _]] = AnnotationScanner.sourceRegister(dataSourceConfig.dataSourceType)
- assert(value != null)
-
- val df = value.getMethod("read", classOf[WorkflowStep], classOf[JobLog], classOf[TableEnvironment], classOf[Variables])
- .invoke(value.newInstance(), step, jobLog, tEnv, variables)
- .asInstanceOf[DataFrame]
-
- addDerivedColumns(dataSourceConfig, df)
- }
-
- def write(df: DataFrame,
- step: WorkflowStep,
- variables: Variables): Unit = {
- val targetConfig = step.getTargetConfig[DataSourceConfig]
- if ((step.throwExceptionIfEmpty == BooleanString.TRUE || step.skipFollowStepWhenEmpty == BooleanString.TRUE)
- && !df.execute().collect().hasNext) {
- throw EmptyDataException(s"Job aborted, because step ${step.step} 's result is empty", step.step)
- }
-
- val value: Class[Sink[_]] = AnnotationScanner.sinkRegister(targetConfig.dataSourceType)
- assert(value != null)
-
- value.getMethod("write", classOf[DataFrame], classOf[WorkflowStep], classOf[Variables])
- .invoke(value.newInstance(), df, step, variables)
- }
-
- private def addDerivedColumns(dataSourceConfig: DataSourceConfig, df: DataFrame): DataFrame = {
- if (dataSourceConfig.derivedColumns != null) {
- val derivedColumns = dataSourceConfig.derivedColumns
- .split(";")
- .map(_.split(":"))
-
- derivedColumns.foldLeft(df)((df: DataFrame, derivedColumn: Array[String]) =>
- df.addOrReplaceColumns(concat($(derivedColumn(1)), derivedColumn(0)))
- )
- } else {
- df
- }
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/Types.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/Types.scala
deleted file mode 100644
index e9717af..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/job/Types.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.job
-
-import org.apache.flink.table.api.Table
-
-object Types {
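- // Alias Flink's Table as DataFrame so shared code mirrors the Spark module.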
- type DataFrame = Table
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/quality/FlinkQualityCheck.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/quality/FlinkQualityCheck.scala
deleted file mode 100644
index 0bb0357..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/quality/FlinkQualityCheck.scala
+++ /dev/null
@@ -1,229 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.quality
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import com.github.sharpdata.sharpetl.core.quality.QualityCheck._
-import com.github.sharpdata.sharpetl.core.quality._
-import com.github.sharpdata.sharpetl.core.repository.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, StringUtil}
-import com.github.sharpdata.sharpetl.flink.extra.driver.FlinkJdbcStatement.fixedResult
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-import org.apache.flink.table.api.Expressions._
-import org.apache.flink.table.api.internal.TableEnvironmentImpl
-import org.apache.flink.table.api.{TableEnvironment, ValidationException}
-import org.apache.flink.table.operations.{ModifyOperation, Operation, QueryOperation}
-
-import java.util
-import scala.jdk.CollectionConverters.asScalaIteratorConverter
-
-@Stable(since = "1.0.0")
-class FlinkQualityCheck(val tEnv: TableEnvironment,
- override val dataQualityCheckRules: Map[String, QualityCheckRule],
- override val qualityCheckAccessor: QualityCheckAccessor)
- extends QualityCheck[DataFrame] {
-
- override def queryCheckResult(sql: String): Seq[DataQualityCheckResult] = {
- if (sql.trim == "") {
- Seq()
- } else {
- ETLLogger.info(s"execution sql:\n $sql")
- tEnv.sqlQuery(sql).execute().collect().asScala
- .map(it => DataQualityCheckResult(
- // scalastyle:off
- it.getField(0).toString, // column
- it.getField(1).toString, // dataCheckType
- it.getField(2).toString, // ids
- it.getField(3).toString.split(DELIMITER).head, // errorType
- it.getField(4).toString.toInt, // warnCount
- it.getField(5).toString.toInt) // errorCount
- // scalastyle:on
- )
- .filterNot(it => it.warnCount < 1 && it.errorCount < 1)
- .toSeq
- }
- }
-
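- // Parse first to tell queries from DML: queries become Tables, anything
- // else is executed directly and answered with the fixed "SUCCESS" result.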
- override def execute(sql: String): DataFrame = {
- ETLLogger.info(s"Execution sql: \n $sql")
- val impl = tEnv.asInstanceOf[TableEnvironmentImpl]
- val operations: util.List[Operation] = impl.getParser.parse(sql)
- if (operations.size != 1) {
- throw new ValidationException("Unsupported SQL query! sqlQuery() only accepts a single SQL query.")
- }
- else {
- val operation: Operation = operations.get(0)
- operation match {
- case op: QueryOperation if !operation.isInstanceOf[ModifyOperation] =>
- impl.createTable(op)
- case _ =>
- tEnv.executeSql(sql)
- fixedResult
- }
- }
- }
-
- override def createView(df: DataFrame, tempViewName: String): Unit = {
- ETLLogger.info(s"Creating temp view `$tempViewName`")
- tEnv.createTemporaryView(s"`$tempViewName`", df)
- }
-
- override def dropView(tempViewName: String): Unit = {
- ETLLogger.info(s"Dropping temp view `$tempViewName`")
- tEnv.dropTemporaryView(s"`$tempViewName`")
- }
-
- override def dropUnusedCols(df: DataFrame, cols: String): DataFrame = {
- df.dropColumns(cols.split(",").map(col => $(col.trim)).toArray: _*)
- }
-
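- // Deduplicates by primary key: keeps the row whose sort column equals the
- // per-key window maximum (no ROW_NUMBER needed).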
- override def windowByPkSql(tempViewName: String, idColumns: String, sortColumns: String = "", desc: Boolean = true): String = {
- s"""
- |SELECT *, 1 as __row_num
- |FROM (SELECT *, MAX($sortColumns)
- | OVER (PARTITION BY $idColumns) as __max__
- | FROM `$tempViewName`
- |) WHERE $sortColumns = __max__""".stripMargin
- }
-
- override def windowByPkSqlErrors(tempViewName: String, idColumns: String, sortColumns: String = "", desc: Boolean = true): String = {
- s"""
- |SELECT ${joinIdColumns(idColumns)} as id,
- | ARRAY['Duplicated PK check$DELIMITER$idColumns'] as error_result,
- | 1 as __row_num
- |FROM (SELECT *, MAX($sortColumns)
- | OVER (PARTITION BY $idColumns) as __max__
- | FROM `$tempViewName`
- |) WHERE $sortColumns = __max__""".stripMargin
- }
-
- override def generateErrorUnions(dataQualityCheckMapping: Seq[DataQualityConfig], topN: Int, view: String): String = {
- dataQualityCheckMapping
- .filter(_.errorType == ErrorType.error)
- .map(it =>
- s"""(SELECT
- | '${it.column}' as `column`,
- | '${it.dataCheckType}' as dataCheckType,
- | LISTAGG(CAST(id as STRING)) as ids,
- | '${it.errorType}' as errorType,
- | 0 as warnCount,
- | count(*) as errorCount
- |FROM `$view`
- |WHERE ARRAY_CONTAINS(error_result, '${it.dataCheckType}${DELIMITER}${it.column}')
- |)""".stripMargin
- )
- .mkString("\nUNION ALL\n")
- }
-
- override def generateWarnUnions(dataQualityCheckMapping: Seq[DataQualityConfig], topN: Int, view: String): String = {
- dataQualityCheckMapping
- .filter(_.errorType == ErrorType.warn)
- .map(it =>
- s"""(SELECT
- | '${it.column}' as `column`,
- | '${it.dataCheckType}' as dataCheckType,
- | LISTAGG(CAST(id as STRING)) as ids,
- | '${it.errorType}' as errorType,
- | count(*) as warnCount,
- | 0 as errorCount
- |FROM `$view`
- |WHERE ARRAY_CONTAINS(warn_result, '${it.dataCheckType}${DELIMITER}${it.column}')
- |)""".stripMargin
- )
- .mkString("\nUNION ALL\n")
- }
-
- override def checkSql(tempViewName: String, resultView: String, dataQualityCheckMapping: Seq[DataQualityConfig], idColumn: String): String = {
- s"""
- |CREATE TEMPORARY VIEW `$resultView`
- |AS SELECT ${joinIdColumns(idColumn)} as id,
- | ARRAY[${generateWarnCases(dataQualityCheckMapping)}
- | ] as warn_result,
- | ARRAY[${generateErrorCases(dataQualityCheckMapping)}
- | ] as error_result
- |FROM `$tempViewName`
- """.stripMargin
- }
-
- override def udrWarnSql(topN: Int, udrWithViews: Seq[(DataQualityConfig, String)])
- : String = {
- if (udrWithViews.isEmpty) {
- StringUtil.EMPTY
- } else {
- udrWithViews.map { case (udr, viewName) =>
- s"""
- |(SELECT '${udr.column}' as column,
- | '${udr.dataCheckType}' as dataCheckType,
- | LISTAGG(CAST(id as STRING)) as ids,
- | '${udr.errorType}' as errorType,
- | count(*) as warnCount,
- | 0 as errorCount
- |FROM `$viewName`)
- |""".stripMargin
- }
- .mkString("\nUNION ALL\n")
- }
- }
-
- override def udrErrorSql(topN: Int, udrWithViews: Seq[(DataQualityConfig, String)])
- : String = {
- if (udrWithViews.isEmpty) {
- StringUtil.EMPTY
- } else {
- udrWithViews.map { case (udr, viewName) =>
- s"""
- |(SELECT '${udr.column}' as column,
- | '${udr.dataCheckType}' as dataCheckType,
- | LISTAGG(CAST(id as STRING)) as ids,
- | '${udr.errorType}' as errorType,
- | 0 as warnCount,
- | count(*) as errorCount
- |FROM `$viewName`)
- |""".stripMargin
- }
- .mkString("\nUNION ALL\n")
- }
- }
-
- override def antiJoinSql(idColumn: String, tempViewName: String, resultView: String): String = {
- s"""|WHERE `$tempViewName`.`$idColumn` NOT IN (
- | SELECT id FROM `$resultView`
- | WHERE CARDINALITY(error_result) > 0
- |)
- """.stripMargin
- }
-
- override def udrAntiJoinSql(idColumn: String, tempViewName: String, viewNames: Seq[String]): String = {
- if (viewNames.isEmpty) {
- StringUtil.EMPTY
- } else {
- s"""|WHERE `$tempViewName`.`$idColumn` NOT IN (
- | SELECT id FROM (${viewNames.map(view => s"SELECT id FROM $view").mkString("\nUNION ALL\n")})
- | WHERE CARDINALITY(error_result) > 0
- |)
- |""".stripMargin
- }
- }
-
- def emptyArrayIfMissing(query: String): String = {
- if (query.trim == "") {
- "array[]"
- } else {
- query
- }
- }
-
- def generateWarnCases(dataQualityCheckMapping: Seq[DataQualityConfig]): String = {
- emptyArrayIfMissing(dataQualityCheckMapping
- .filter(_.errorType == ErrorType.warn)
- .map(it => s"""CASE WHEN ${it.rule} THEN '${it.dataCheckType}${DELIMITER}${it.column}' ELSE '' END""")
- .mkString(",\n\t\t\t\t")
- )
- }
-
- def generateErrorCases(dataQualityCheckMapping: Seq[DataQualityConfig]): String = {
- emptyArrayIfMissing(dataQualityCheckMapping
- .filter(_.errorType == ErrorType.error)
- .map(it => s"""CASE WHEN ${it.rule} THEN '${it.dataCheckType}${DELIMITER}${it.column}' ELSE '' END""")
- .mkString(",\n\t\t\t\t")
- )
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/udf/CollectWsUDF.java b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/udf/CollectWsUDF.java
deleted file mode 100644
index 90901c1..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/udf/CollectWsUDF.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.udf;
-
-import org.apache.flink.table.annotation.DataTypeHint;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-import java.util.Map;
-import java.util.Optional;
-
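-// Joins the keys of a collected multiset (its distinct values) with commas,
-// similar in spirit to Spark's concat_ws over collect_set.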
-public class CollectWsUDF extends ScalarFunction {
-
- public String eval(@DataTypeHint("MAP") Map multiset) {
- return String.join(",", multiset.keySet());
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return TypeInference.newBuilder().outputTypeStrategy(callContext -> Optional.of(DataTypes.STRING())).build();
- }
-
-}
\ No newline at end of file
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/util/ETLFlinkSession.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/util/ETLFlinkSession.scala
deleted file mode 100644
index e6d02bb..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/util/ETLFlinkSession.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.util
-
-import com.github.sharpdata.sharpetl.core.quality.QualityCheckRule
-import com.github.sharpdata.sharpetl.core.repository.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.util.Constants.ETLDatabaseType.FLINK_SHARP_ETL
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import com.github.sharpdata.sharpetl.flink.job.FlinkWorkflowInterpreter
-import com.github.sharpdata.sharpetl.flink.udf.CollectWsUDF
-import org.apache.flink.configuration.Configuration
-import org.apache.flink.table.api.{EnvironmentSettings, TableEnvironment}
-
-object ETLFlinkSession {
- var local = false
- var wfName = "default"
- private var autoCloseSession: Boolean = true
-
- val batchSettings: EnvironmentSettings = EnvironmentSettings.newInstance
- .withConfiguration(getConf())
- //.inStreamingMode
- .inBatchMode
- .build()
-
- val batchEnv: TableEnvironment = TableEnvironment.create(batchSettings)
-
- private def getConf(): Configuration = {
- val conf = new Configuration
- ETLConfig
- .getFlinkProperties(wfName)
- .foreach {
- case (key, value) =>
- conf.setString(key, value)
- println(s"[Set flink config]: $key=$value")
- }
- conf
- }
-
- def initUdf(session: TableEnvironment): Unit = {
- session.createTemporarySystemFunction("collect_ws", classOf[CollectWsUDF])
- }
-
- def getFlinkInterpreter(local: Boolean,
- wfName: String,
- autoCloseSession: Boolean,
- etlDatabaseType: String,
- dataQualityCheckRules: Map[String, QualityCheckRule])
- : FlinkWorkflowInterpreter = {
- ETLFlinkSession.local = local
- ETLFlinkSession.wfName = wfName
- ETLFlinkSession.autoCloseSession = autoCloseSession
- val session = ETLFlinkSession.batchEnv
- initUdf(session)
- createCatalogIfNeed(etlDatabaseType, session)
- new FlinkWorkflowInterpreter(session, dataQualityCheckRules, QualityCheckAccessor.getInstance(etlDatabaseType))
- }
-
- // def release(spark: SparkSession): Unit = {
- // if (spark != null && autoCloseSession) {
- // spark.stop()
- // }
- // }
-
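- // Bootstraps the Paimon catalog from the flyway.* properties when running
- // locally; on a cluster the catalog is expected to exist already.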
- def createCatalogIfNeed(etlDatabaseType: String, session: TableEnvironment): Unit = {
- if (etlDatabaseType == FLINK_SHARP_ETL) {
- val catalogName = ETLConfig.getProperty("flyway.catalog")
- val catalog = session.getCatalog(catalogName)
- if (!catalog.isPresent) {
- if (local) {
- ETLLogger.info(s"catalog $catalogName not found, create it")
- session.executeSql(
- s"""
- |CREATE CATALOG $catalogName
- |WITH (
- | 'type' = 'paimon',
- | 'warehouse' = '${ETLConfig.getProperty("flyway.warehouse")}',
- | 'fs.oss.endpoint' = '${ETLConfig.getProperty("flyway.endpoint")}',
- | 'fs.oss.accessKeyId' = '${ETLConfig.getProperty("flyway.ak")}',
- | 'fs.oss.accessKeySecret' = '${ETLConfig.getProperty("flyway.sk")}'
- |)""".stripMargin)
- ETLFlinkSession.batchEnv.useCatalog(catalogName)
- session.executeSql(s"CREATE DATABASE IF NOT EXISTS ${ETLConfig.getProperty("flyway.database")}")
- } else {
- throw new RuntimeException(s"catalog $catalogName not found")
- }
- }
- }
- }
-}
diff --git a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/util/VariablesUtil.scala b/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/util/VariablesUtil.scala
deleted file mode 100644
index 3b7b763..0000000
--- a/flink/src/main/scala/com/github/sharpdata/sharpetl/flink/util/VariablesUtil.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.github.sharpdata.sharpetl.flink.util
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.flink.job.Types.DataFrame
-
-import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable`
-
-object VariablesUtil {
-
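- // Exposes the first result row as substitution variables: each column value
- // is registered under ${name}, or verbatim when the name is already #{...}.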
- def setVariables(
- df: DataFrame,
- variables: Variables): Unit = {
- // Collect once; each execute() would otherwise re-run the whole Flink job.
- val rows = df.execute().collect()
- if (rows.hasNext) {
- val fieldNames = df.getResolvedSchema.getColumns.map(_.getName)
- val row = rows.next()
- fieldNames.zipWithIndex.foreach {
- case (fieldName, idx) =>
- val fieldValue = String.valueOf(row.getField(idx)) // String.valueOf handles genuinely null fields without an NPE
- val key = if (fieldName.matches("^#\\{.+\\}$")) {
- fieldName
- } else {
- String.format("${%s}", fieldName)
- }
- variables += key -> fieldValue
- }
- }
- ETLLogger.info(s"Variables: $variables")
- }
-
-}
diff --git a/gradle.properties b/gradle.properties
deleted file mode 100644
index d5ad4f8..0000000
--- a/gradle.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-scalaVersion=2.12
-sparkVersion=3.3.0
-scalaCompt=2.12.15
-flinkVersion=1.17.2
-
-#general/aliyun/GCP
-profiles=general
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
deleted file mode 100644
index 41d9927..0000000
Binary files a/gradle/wrapper/gradle-wrapper.jar and /dev/null differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
deleted file mode 100644
index ae04661..0000000
--- a/gradle/wrapper/gradle-wrapper.properties
+++ /dev/null
@@ -1,5 +0,0 @@
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip
-zipStoreBase=GRADLE_USER_HOME
-zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
deleted file mode 100755
index 1b6c787..0000000
--- a/gradlew
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/bin/sh
-
-#
-# Copyright © 2015-2021 the original authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-##############################################################################
-#
-# Gradle start up script for POSIX generated by Gradle.
-#
-# Important for running:
-#
-# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
-# noncompliant, but you have some other compliant shell such as ksh or
-# bash, then to run this script, type that shell name before the whole
-# command line, like:
-#
-# ksh Gradle
-#
-# Busybox and similar reduced shells will NOT work, because this script
-# requires all of these POSIX shell features:
-# * functions;
-# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
-# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
-# * compound commands having a testable exit status, especially «case»;
-# * various built-in commands including «command», «set», and «ulimit».
-#
-# Important for patching:
-#
-# (2) This script targets any POSIX shell, so it avoids extensions provided
-# by Bash, Ksh, etc; in particular arrays are avoided.
-#
-# The "traditional" practice of packing multiple parameters into a
-# space-separated string is a well documented source of bugs and security
-# problems, so this is (mostly) avoided, by progressively accumulating
-# options in "$@", and eventually passing that to Java.
-#
-# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
-# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
-# see the in-line comments for details.
-#
-# There are tweaks for specific operating systems such as AIX, CygWin,
-# Darwin, MinGW, and NonStop.
-#
-# (3) This script is generated from the Groovy template
-# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
-# within the Gradle project.
-#
-# You can find Gradle at https://github.com/gradle/gradle/.
-#
-##############################################################################
-
-# Attempt to set APP_HOME
-
-# Resolve links: $0 may be a link
-app_path=$0
-
-# Need this for daisy-chained symlinks.
-while
- APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
- [ -h "$app_path" ]
-do
- ls=$( ls -ld "$app_path" )
- link=${ls#*' -> '}
- case $link in #(
- /*) app_path=$link ;; #(
- *) app_path=$APP_HOME$link ;;
- esac
-done
-
-APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
-
-APP_NAME="Gradle"
-APP_BASE_NAME=${0##*/}
-
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
-
-# Use the maximum available, or set MAX_FD != -1 to use that value.
-MAX_FD=maximum
-
-warn () {
- echo "$*"
-} >&2
-
-die () {
- echo
- echo "$*"
- echo
- exit 1
-} >&2
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false
-msys=false
-darwin=false
-nonstop=false
-case "$( uname )" in #(
- CYGWIN* ) cygwin=true ;; #(
- Darwin* ) darwin=true ;; #(
- MSYS* | MINGW* ) msys=true ;; #(
- NONSTOP* ) nonstop=true ;;
-esac
-
-CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
-
-
-# Determine the Java command to use to start the JVM.
-if [ -n "$JAVA_HOME" ] ; then
- if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
- # IBM's JDK on AIX uses strange locations for the executables
- JAVACMD=$JAVA_HOME/jre/sh/java
- else
- JAVACMD=$JAVA_HOME/bin/java
- fi
- if [ ! -x "$JAVACMD" ] ; then
- die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
- fi
-else
- JAVACMD=java
- which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-fi
-
-# Increase the maximum file descriptors if we can.
-if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
- case $MAX_FD in #(
- max*)
- MAX_FD=$( ulimit -H -n ) ||
- warn "Could not query maximum file descriptor limit"
- esac
- case $MAX_FD in #(
- '' | soft) :;; #(
- *)
- ulimit -n "$MAX_FD" ||
- warn "Could not set maximum file descriptor limit to $MAX_FD"
- esac
-fi
-
-# Collect all arguments for the java command, stacking in reverse order:
-# * args from the command line
-# * the main class name
-# * -classpath
-# * -D...appname settings
-# * --module-path (only if needed)
-# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
-
-# For Cygwin or MSYS, switch paths to Windows format before running java
-if "$cygwin" || "$msys" ; then
- APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
- CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
-
- JAVACMD=$( cygpath --unix "$JAVACMD" )
-
- # Now convert the arguments - kludge to limit ourselves to /bin/sh
- for arg do
- if
- case $arg in #(
- -*) false ;; # don't mess with options #(
- /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
- [ -e "$t" ] ;; #(
- *) false ;;
- esac
- then
- arg=$( cygpath --path --ignore --mixed "$arg" )
- fi
- # Roll the args list around exactly as many times as the number of
- # args, so each arg winds up back in the position where it started, but
- # possibly modified.
- #
- # NB: a `for` loop captures its iteration list before it begins, so
- # changing the positional parameters here affects neither the number of
- # iterations, nor the values presented in `arg`.
- shift # remove old arg
- set -- "$@" "$arg" # push replacement arg
- done
-fi
-
-# Collect all arguments for the java command;
-# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
-# shell script including quotes and variable substitutions, so put them in
-# double quotes to make sure that they get re-expanded; and
-# * put everything else in single quotes, so that it's not re-expanded.
-
-set -- \
- "-Dorg.gradle.appname=$APP_BASE_NAME" \
- -classpath "$CLASSPATH" \
- org.gradle.wrapper.GradleWrapperMain \
- "$@"
-
-# Use "xargs" to parse quoted args.
-#
-# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
-#
-# In Bash we could simply go:
-#
-# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
-# set -- "${ARGS[@]}" "$@"
-#
-# but POSIX shell has neither arrays nor command substitution, so instead we
-# post-process each arg (as a line of input to sed) to backslash-escape any
-# character that might be a shell metacharacter, then use eval to reverse
-# that process (while maintaining the separation between arguments), and wrap
-# the whole thing up as a single "set" statement.
-#
-# This will of course break if any of these variables contains a newline or
-# an unmatched quote.
-#
-
-eval "set -- $(
- printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
- xargs -n1 |
- sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
- tr '\n' ' '
- )" '"$@"'
-
-exec "$JAVACMD" "$@"
diff --git a/gradlew.bat b/gradlew.bat
deleted file mode 100644
index 107acd3..0000000
--- a/gradlew.bat
+++ /dev/null
@@ -1,89 +0,0 @@
-@rem
-@rem Copyright 2015 the original author or authors.
-@rem
-@rem Licensed under the Apache License, Version 2.0 (the "License");
-@rem you may not use this file except in compliance with the License.
-@rem You may obtain a copy of the License at
-@rem
-@rem https://www.apache.org/licenses/LICENSE-2.0
-@rem
-@rem Unless required by applicable law or agreed to in writing, software
-@rem distributed under the License is distributed on an "AS IS" BASIS,
-@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-@rem See the License for the specific language governing permissions and
-@rem limitations under the License.
-@rem
-
-@if "%DEBUG%" == "" @echo off
-@rem ##########################################################################
-@rem
-@rem Gradle startup script for Windows
-@rem
-@rem ##########################################################################
-
-@rem Set local scope for the variables with windows NT shell
-if "%OS%"=="Windows_NT" setlocal
-
-set DIRNAME=%~dp0
-if "%DIRNAME%" == "" set DIRNAME=.
-set APP_BASE_NAME=%~n0
-set APP_HOME=%DIRNAME%
-
-@rem Resolve any "." and ".." in APP_HOME to make it shorter.
-for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
-
-@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
-
-@rem Find java.exe
-if defined JAVA_HOME goto findJavaFromJavaHome
-
-set JAVA_EXE=java.exe
-%JAVA_EXE% -version >NUL 2>&1
-if "%ERRORLEVEL%" == "0" goto execute
-
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:findJavaFromJavaHome
-set JAVA_HOME=%JAVA_HOME:"=%
-set JAVA_EXE=%JAVA_HOME%/bin/java.exe
-
-if exist "%JAVA_EXE%" goto execute
-
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:execute
-@rem Setup the command line
-
-set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
-
-
-@rem Execute Gradle
-"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
-
-:end
-@rem End local scope for the variables with windows NT shell
-if "%ERRORLEVEL%"=="0" goto mainEnd
-
-:fail
-rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
-rem the _cmd.exe /c_ return code!
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
-exit /b 1
-
-:mainEnd
-if "%OS%"=="Windows_NT" endlocal
-
-:omega
diff --git a/scalastyle_config.xml b/scalastyle_config.xml
deleted file mode 100644
index cc27ce5..0000000
--- a/scalastyle_config.xml
+++ /dev/null
@@ -1,137 +0,0 @@
-
- Scalastyle standard configuration
-
- [137 scalastyle check definitions: XML markup not recoverable]
diff --git a/settings.gradle b/settings.gradle
deleted file mode 100644
index 54d3eed..0000000
--- a/settings.gradle
+++ /dev/null
@@ -1,12 +0,0 @@
-rootProject.name = 'SharpETL'
-include("core")
-include("data-modeling")
-include("spark")
-include("flink")
-include("datasource:hive2")
-include("datasource:hive3")
-include("datasource:kafka")
-include("datasource:elasticsearch")
-include("datasource:kudu")
-include("datasource:bigquery")
-
diff --git a/spark/build.gradle b/spark/build.gradle
deleted file mode 100644
index 96ce29d..0000000
--- a/spark/build.gradle
+++ /dev/null
@@ -1,644 +0,0 @@
-plugins {
- id "java-library"
- id "application"
- id "scala"
- id "com.github.alisiikh.scalastyle"
- id "com.github.johnrengelman.shadow" version "7.1.2"
- id "com.github.maiflai.scalatest"
- id "maven-publish"
-}
-
-import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
-
-group = 'com.github.sharpdata.sharpetl'
-sourceCompatibility = 1.8
-version = '0.2.0'
-
-publishing {
- publications {
- maven(MavenPublication) {
- artifactId = "sharp-etl-spark-${sparkVersion}_${scalaVersion}"
- afterEvaluate {
- //artifact(tasks.findByName("shadowJar"))
- artifact(tasks.findByName("sourceJar"))
- artifact(tasks.findByName("slimJar"))
- }
- }
- }
- repositories {
-// maven {
-// name 'maven-snapshots'
-// url "http://repo.maven.com/repository/maven-snapshots"
-// credentials {
-// username project.repoUser
-// password project.repoPassword
-// }
-// }
-
- maven {
- name = "GitHubPackages"
- url = uri("https://maven.pkg.github.com/SharpData/SharpETL")
- credentials {
- username = System.getenv("GITHUB_ACTOR")
- password = System.getenv("GITHUB_TOKEN")
- }
- }
- }
-}
-
-
-dependencies {
- implementation(project(":core"))
- implementation(project(":data-modeling"))
-// compileOnly(project(":datasource:hive3"))
-// testImplementation(project(":datasource:hive2"))
- implementation(project(":datasource:hive2"))
- implementation(project(":datasource:kafka"))
- implementation(project(":datasource:elasticsearch"))
- implementation(project(":datasource:bigquery"))
-
-// scoverage project(':core').sourceSets.scoverage.output
-// scoverage project(':data-modeling').sourceSets.scoverage.output
-// scoverage project(':datasource:hive3').sourceSets.scoverage.output
-// scoverage project(':datasource:hive2').sourceSets.scoverage.output
-// scoverage project(':datasource:kafka').sourceSets.scoverage.output
-// scoverage project(':datasource:elasticsearch').sourceSets.scoverage.output
-// scoverage project(':datasource:bigquery').sourceSets.scoverage.output
-
- implementation "net.liftweb:lift-json_$scalaVersion:3.4.3"
- implementation "org.scala-lang:scala-compiler:$scalaCompt"
- implementation "org.scala-lang:scala-library:$scalaCompt"
- implementation "org.scala-lang:scala-reflect:$scalaCompt"
- implementation "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0"
- implementation "com.google.guava:failureaccess:1.0.1"
- implementation "com.jayway.jsonpath:json-path:2.6.0"
- implementation "com.google.guava:guava:29.0-jre"
- implementation "org.apache.spark:spark-sql_$scalaVersion:$sparkVersion"
- implementation "org.apache.spark:spark-core_$scalaVersion:$sparkVersion"
- implementation "org.apache.spark:spark-streaming_$scalaVersion:$sparkVersion"
- implementation "org.apache.spark:spark-streaming-kafka-0-10_$scalaVersion:$sparkVersion"
- implementation "org.apache.kafka:kafka-clients:2.0.0"
- if (sparkVersion.startsWith("3.5")) {
- implementation "io.delta:delta-spark_$scalaVersion:3.0.0"
- } else if (sparkVersion.startsWith("3.4")) {
- implementation "io.delta:delta-core_$scalaVersion:2.4.0"
- } else if (sparkVersion.startsWith("3.3")) {
- implementation "io.delta:delta-core_$scalaVersion:2.3.0"
- } else if (sparkVersion.startsWith("3.2")) {
- implementation "io.delta:delta-core_$scalaVersion:2.0.2"
- } else if (sparkVersion.startsWith("3.1")) {
- implementation "io.delta:delta-core_$scalaVersion:1.0.1"
- } else if (sparkVersion.startsWith("3.0")) {
- implementation "io.delta:delta-core_$scalaVersion:0.8.0"
- } else if (sparkVersion.startsWith("2")) {
- implementation "io.delta:delta-core_$scalaVersion:0.6.1"
- }
- compileOnly 'org.projectlombok:lombok:1.18.22'
- annotationProcessor 'org.projectlombok:lombok:1.18.22'
- implementation "com.jcraft:jsch:0.1.55"
- implementation "org.jpmml:pmml-agent:1.5.3"
- implementation "org.jpmml:pmml-model:1.5.3"
- implementation "org.jpmml:pmml-evaluator:1.5.3"
- implementation "org.jpmml:pmml-evaluator-extension:1.5.3"
- if (scalaVersion.startsWith("2.13")) {
- implementation "com.crealytics:spark-excel_$scalaVersion:3.2.0_0.16.1-pre1"
- } else {
- implementation "com.crealytics:spark-excel_$scalaVersion:0.13.1"
- }
- implementation("commons-net:commons-net") {
- version {
- strictly '3.1'
- }
- }
- implementation("org.apache.logging.log4j:log4j-core") {
- version {
- require '2.17.1'
- }
- }
- implementation 'io.github.classgraph:classgraph:4.8.149'
- implementation group: 'com.jolbox', name: 'bonecp', version: '0.8.0.RELEASE'
- implementation group: "org.apache.hadoop", name: "hadoop-client", version: "2.7.2"
- implementation "org.apache.httpcomponents:httpclient:4.5.12"
-
- if (sparkVersion.startsWith("3.5")) {
- implementation "io.circe:circe-yaml_$scalaVersion:0.15.0-RC1"
- implementation "io.circe:circe-generic_$scalaVersion:0.15.0-M1"
- implementation "io.circe:circe-generic-extras_$scalaVersion:0.14.3"
- } else {
- implementation "io.circe:circe-yaml_$scalaVersion:0.11.0-M1"
- implementation "io.circe:circe-generic_$scalaVersion:0.12.0-M3"
- implementation "io.circe:circe-generic-extras_$scalaVersion:0.12.0-M3"
- }
-
- if (profiles.contains("aliyun")) {
- implementation "com.aliyun.oss:aliyun-sdk-oss:3.16.0"
- implementation "com.aliyun.datalake:metastore-client-hive2:0.2.14"
- }
-
- if (sparkVersion.startsWith("3.1") || sparkVersion.startsWith("3.0")) {
- implementation("com.fasterxml.jackson.core:jackson-databind") {
- version {
- strictly '2.10.0'
- }
- }
- } else if (sparkVersion.startsWith("3.2")) {
- implementation("com.fasterxml.jackson.core:jackson-databind") {
- version {
- strictly '2.12.3'
- }
- }
- } else if (sparkVersion.startsWith("3.3")) {
- implementation("com.fasterxml.jackson.core:jackson-databind") {
- version {
- strictly '2.13.3'
- }
- }
- } else if (sparkVersion.startsWith("3.4")) {
- implementation("com.fasterxml.jackson.core:jackson-databind") {
- version {
- strictly '2.14.2'
- }
- }
- } else if (sparkVersion.startsWith("3.5")) {
- implementation("com.fasterxml.jackson.core:jackson-databind") {
- version {
- strictly '2.15.2'
- }
- }
- } else {
- implementation("com.fasterxml.jackson.core:jackson-databind") {
- version {
- strictly '2.6.7.3'
- }
- }
- }
- implementation group: "org.apache.commons", name: "commons-lang3", version: "3.10"
- implementation group: "mysql", name: "mysql-connector-java", version: "8.0.19"
- implementation group: "com.oracle.ojdbc", name: "ojdbc8", version: "19.3.0.0"
- implementation group: "com.microsoft.sqlserver", name: "mssql-jdbc", version: "9.4.0.jre8"
- implementation "net.sourceforge.jtds:jtds:1.3.1"
- implementation group: "com.ibm.informix", name: "jdbc", version: "4.10.14"
- implementation group: "org.apache.spark", name: "spark-sql-kafka-0-10_$scalaVersion", version: "$sparkVersion"
- implementation group: "org.postgresql", name: "postgresql", version: "42.2.14"
- //implementation 'org.apache.hive:hive-jdbc:2.3.7'
- implementation('io.github.coolbeevip:flyway-core:9.15.2.2') {
- exclude group: 'com.fasterxml.jackson.dataformat', module: 'jackson-dataformat-toml'
- }
- implementation('io.github.coolbeevip:flyway-mysql:9.15.2.2') {
- exclude group: 'com.fasterxml.jackson.dataformat', module: 'jackson-dataformat-toml'
- }
- implementation group: "org.mybatis", name: "mybatis", version: "3.5.4"
- implementation 'info.picocli:picocli:4.6.3'
-
- implementation("org.fusesource.jansi:jansi") {
- // pin an old version here; newer versions crash the JVM,
- // taken from https://github.com/fusesource/jansi/issues/66#issuecomment-1018386584
- version {
- strictly '1.18'
- }
- }
-
- testImplementation group: 'org.mockito', name: "mockito-scala_$scalaVersion", version: '1.16.29'
- testImplementation group: "org.scalatest", name: "scalatest_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalatest", name: "scalatest-funspec_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.scalactic", name: "scalactic_$scalaVersion", version: "3.2.11"
- testImplementation group: "org.pegdown", name: "pegdown", version: "1.4.2"
- testImplementation("com.github.tomakehurst:wiremock-jre8:2.27.0") {
- exclude group: "com.fasterxml.jackson.core"
- }
- testImplementation group: "org.junit.jupiter", name: "junit-jupiter-api", version: "5.6.2"
- testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:5.6.2"
- testRuntimeOnly "com.vladsch.flexmark:flexmark-all:0.62.2"
-// if (sparkVersion.startsWith('2')) {
-// testImplementation("com.github.mrpowers:spark-fast-tests_$scalaVersion:0.23.0")
-// } else if (sparkVersion.startsWith("3")) {
-// testImplementation("com.github.mrpowers:spark-fast-tests_$scalaVersion:1.0.0")
-// }
- testImplementation "org.testcontainers:testcontainers:1.16.2"
- testImplementation "org.testcontainers:mysql:1.16.2"
- testImplementation "org.testcontainers:postgresql:1.17.2"
- testImplementation "org.testcontainers:mockserver:1.16.2"
- testImplementation "org.mock-server:mockserver-client-java:5.11.2"
-}
-
-configurations.all {
- exclude group: "org.apache.hadoop", module: "hadoop-annotations"
-}
-
-application {
- mainClassName = "com.github.sharpdata.sharpetl.spark.Entrypoint"
-}
-
-configurations.implementation.setCanBeResolved(true)
-configurations.api.setCanBeResolved(true)
-
-def addDeps(String dep) {
- println("Adding $dep into shadow jar...")
- shadowJar {
- dependencies {
- include(dependency(dep))
- }
- }
- def depSpec = createDepSpec(dep)
- Set resolvedDependencies = project.configurations.runtimeClasspath.getResolvedConfiguration().getFirstLevelModuleDependencies(depSpec)
- if (resolvedDependencies.isEmpty()) {
- println("ERROR: $dep not found in current project, please add $dep to project dependencies")
- }
- getResolvedArtifacts(resolvedDependencies).each { artifact ->
- println("Adding $artifact into shadow jar...")
- shadowJar {
- dependencies {
- include(dependency(artifact))
- }
- }
- }
-}
-
-def getResolvedArtifacts(Set artifacts) {
- Set resolvedArtifacts = [] as Set
- artifacts.each {
- // add current artifact
- resolvedArtifacts << "${it.moduleGroup}:${it.moduleName}:${it.moduleVersion}"
-
- // recursion to add children
- resolvedArtifacts += getResolvedArtifacts(it.children)
- }
- return resolvedArtifacts
-}
-
-def createDepSpec(String dep) {
- return new Spec() {
- @Override
- boolean isSatisfiedBy(Dependency dependency) {
- return dependency == project.dependencies.create(dep)
- }
- }
-}
-
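The Groovy helpers above walk Gradle's resolved dependency graph recursively so that every transitive coordinate lands in the shadow jar. A REPL-pasteable Scala sketch of the same traversal (the case class is illustrative, not the project's API):

    case class ResolvedDep(group: String, name: String, version: String,
                           children: Set[ResolvedDep])

    // Collect "group:name:version" for a node and, recursively, all children.
    def collectCoordinates(deps: Set[ResolvedDep]): Set[String] =
      deps.flatMap { d =>
        collectCoordinates(d.children) + s"${d.group}:${d.name}:${d.version}"
      }
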
-task slimJar(type: ShadowJar) {
- from(
- sourceSets.main.output,
- sourceSets.main.resources
- )
- configurations = [project.configurations.compileClasspath]
- dependencies {
- include(project(":core"))
- include(project(":data-modeling"))
- include(project(":datasource:hive3"))
- include(project(":datasource:kafka"))
- include(project(":datasource:elasticsearch"))
- include(project(":datasource:bigquery"))
-
- include dependency("com.google.guava:guava:29.0-jre")
- include dependency("commons-net:commons-net:3.1")
- }
- relocate("com.google", "com.github.sharpdata.sharpetl.google") {
- include 'com/google/guava/**'
- }
- relocate "org.apache.commons.net", "com.github.sharpdata.sharpetl.commons.net"
- archiveFileName = "sharp-etl-spark-${sparkVersion}_${scalaVersion}-${version}.jar"
-}
-
-shadowJar {
- zip64 true
- classifier null
- dependencies {
- include(project(":core"))
- include(project(":data-modeling"))
- //include(project(":datasource:hive3"))
- include(project(":datasource:hive2"))
- include(project(":datasource:kafka"))
- include(project(":datasource:elasticsearch"))
- include(project(":datasource:bigquery"))
-
- include dependency("org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0")
- include dependency("org.mvel:mvel2:2.4.13.Final")
- include dependency('org.jasypt:jasypt:1.9.3')
- include dependency("com.lihaoyi:fastparse_$scalaVersion:3.0.0")
- include dependency("io.github.classgraph:classgraph:4.8.149")
-
- if (profiles.contains("aliyun")) {
- addDeps("com.aliyun.oss:aliyun-sdk-oss:3.16.0")
- addDeps("com.aliyun.datalake:metastore-client-hive2:0.2.14")
- }
-
- include dependency("org.elasticsearch:elasticsearch-spark-20_$scalaVersion:7.7.0")
- include dependency("org.apache.httpcomponents:httpclient:4.5.12")
- include dependency("org.mybatis:mybatis:3.5.9")
- include dependency('com.zaxxer:HikariCP:2.6.1')
- include dependency('io.github.coolbeevip:flyway-core:9.15.2.2')
- include dependency('io.github.coolbeevip:flyway-mysql:9.15.2.2')
- include dependency("mysql:mysql-connector-java:8.0.19")
- include dependency("com.oracle.ojdbc:ojdbc8:19.3.0.0")
- include dependency("org.apache.spark:spark-sql-kafka-0-10_$scalaVersion:$sparkVersion")
- if (scalaVersion.startsWith('2.11')) {
- include dependency("org.apache.kudu:kudu-spark2_$scalaVersion:1.9.0")
- } else if (sparkVersion.startsWith("3")) {
- include dependency("org.apache.kudu:kudu-spark3_$scalaVersion:1.15.0")
- }
- if (sparkVersion.startsWith("3.5")) {
- addDeps("io.circe:circe-yaml_$scalaVersion:0.15.0-RC1")
- addDeps("io.circe:circe-generic_$scalaVersion:0.15.0-M1")
- addDeps("io.circe:circe-generic-extras_$scalaVersion:0.14.3")
- } else {
- addDeps("io.circe:circe-yaml_$scalaVersion:0.11.0-M1")
- addDeps("io.circe:circe-generic_$scalaVersion:0.12.0-M3")
- addDeps("io.circe:circe-generic-extras_$scalaVersion:0.12.0-M3")
- }
-
- include dependency("org.postgresql:postgresql:42.2.14")
- include dependency("com.jcraft:jsch:0.1.55")
- include dependency("com.cloudera:ImpalaJDBC41:2.6.3")
- include dependency("com.microsoft.sqlserver:mssql-jdbc:9.4.0.jre8")
- include dependency('net.sourceforge.jtds:jtds:1.3.1')
- include dependency("com.ibm.informix:jdbc:4.10.14")
- include dependency("org.jpmml:pmml-evaluator-extension:1.5.3")
- include dependency("org.jpmml:pmml-evaluator:1.5.3")
- include dependency("org.jpmml:pmml-model:1.5.3")
- include dependency("org.jpmml:pmml-agentn:1.5.3")
- include dependency("com.google.guava:guava:29.0-jre")
- include dependency("com.jayway.jsonpath:json-path:2.6.0")
- include dependency("com.google.guava:failureaccess:1.0.1")
- include dependency("javax.mail:mail:1.4.7")
-
- // spark-excel start
- include dependency("xml-resolver:xml-resolver:1.2")
- include dependency("xml-apis:xml-apis:1.4.01")
- include dependency("org.slf4j:slf4j-api:1.7.25")
- include dependency("org.joda:joda-convert:2.0.1")
- include dependency("org.apache.xmlbeans:xmlbeans:3.1.0")
- include dependency("org.apache.poi:poi:4.1.0")
- include dependency("org.apache.poi:poi-ooxml:4.1.0")
- include dependency("org.apache.poi:poi-ooxml-schemas:4.1.0")
- include dependency("org.apache.commons:commons-math3:3.6.1")
- include dependency("org.apache.commons:commons-collections4:4.3")
- include dependency("org.apache.commons:commons-compress:1.19")
- include dependency("joda-time:joda-time:2.9.9")
- include dependency("edu.princeton.cup:java-cup:10k")
- include dependency("commons-net:commons-net:3.1")
- include dependency("commons-codec:commons-codec:1.12")
- include dependency("com.rackspace.eclipse.webtools.sourceediting:org.eclipse.wst.xml.xpath2.processor:2.1.100")
- include dependency("com.rackspace.apache:xerces2-xsd11:2.11.1")
- include dependency("com.monitorjbl:xlsx-streamer:2.1.0")
- include dependency("com.ibm.icu:icu4j:4.6")
- include dependency("com.github.virtuald:curvesapi:1.06")
- include dependency("com.crealytics:spark-excel_$scalaVersion:0.13.1")
- include dependency('info.picocli:picocli:4.6.3')
- include dependency("net.liftweb:lift-json_$scalaVersion:3.4.3")
-
- if (sparkVersion.startsWith("3.5")) {
- addDeps("io.delta:delta-spark_$scalaVersion:3.0.0")
- } else if (sparkVersion.startsWith("3.4")) {
- addDeps("io.delta:delta-core_$scalaVersion:2.4.0")
- } else if (sparkVersion.startsWith("3.3")) {
- addDeps("io.delta:delta-core_$scalaVersion:2.3.0")
- } else if (sparkVersion.startsWith("3.2")) {
- addDeps("io.delta:delta-core_$scalaVersion:2.0.2")
- } else if (sparkVersion.startsWith("3.1")) {
- addDeps("io.delta:delta-core_$scalaVersion:1.0.1")
- } else if (sparkVersion.startsWith("3.0")) {
- addDeps("io.delta:delta-core_$scalaVersion:0.8.0")
- } else if (sparkVersion.startsWith("2")) {
- addDeps("io.delta:delta-core_$scalaVersion:0.6.1")
- }
-
- if (profiles.contains("GCP")) {
- if (sparkVersion.startsWith("2.3")) {
- include dependency("com.google.cloud.spark:spark-bigquery-with-dependencies_$scalaVersion:0.26.0")
- } else if (sparkVersion.startsWith("2.4")) {
- include dependency("com.google.cloud.spark:spark-2.4-bigquery:0.26.0-preview")
- } else if (sparkVersion.startsWith("3.1") || sparkVersion.startsWith("3.2")) {
- include dependency("com.google.cloud.spark:spark-3.1-bigquery:0.26.0-preview")
- } else if (sparkVersion.startsWith("3.0")) {
- include dependency("com.google.cloud.spark:spark-bigquery-with-dependencies_$scalaVersion:0.26.0")
- }
- }
- }
- relocate("com.google", "com.github.sharpdata.sharpetl.google") {
- include 'com/google/guava/**'
- }
- relocate "org.apache.commons.net", "com.github.sharpdata.sharpetl.commons.net"
- relocate "com.zaxxer.hikari", "com.github.sharpdata.sharpetl.hikari"
- //from '../hadoop'
- archiveFileName = "sharp-etl-spark-standalone-${sparkVersion}_${scalaVersion}-${version}.jar"
- mergeServiceFiles {
- // https://github.com/flyway/flyway/issues/3482#issuecomment-1493367875
- setPath("META-INF/services/org.flywaydb.core.extensibility.Plugin")
- }
-}
-
-task clearJar(type: Delete) {
- delete 'build/libs/lib'
-}
-
-tasks.named('processTestResources') {
- duplicatesStrategy = 'EXCLUDE'
-}
-
-task copyDependencies(type: Copy) {
- from(project.parent.allprojects.configurations.compileClasspath)
- into('build/libs/lib')
- include("scala-collection-compat_$scalaVersion-2.6.0.jar")
- include("mvel2-2.4.13.Final.jar")
- include("fastparse_$scalaVersion-3.0.0.jar")
- include("classgraph-4.8.149.jar")
- if (sparkVersion.startsWith("2.3")) {
- include("elasticsearch-spark-20_$scalaVersion-7.7.0.jar")
- } else if (sparkVersion.startsWith("2.4")) {
- include("elasticsearch-spark-20_$scalaVersion-7.14.0.jar")
- } else if (sparkVersion.startsWith("3") & scalaVersion.startsWith("2.12")) {
- include("elasticsearch-spark-30_$scalaVersion-7.16.2.jar")
- } else if (sparkVersion.startsWith("3") & scalaVersion.startsWith("2.13")) {
- include("elasticsearch-spark-30_$scalaVersion-8.1.0.jar")
- }
- include("jasypt-1.9.3.jar")
- include("httpclient-4.5.12.jar")
- include("mybatis-3.5.9.jar")
- include("HikariCP-2.6.1.jar")
- include("flyway-core-9.15.2.2.jar")
- include("flyway-core-9.15.2.2.jar")
- include("mysql-connector-java-8.0.19.jar")
- include("ojdbc8-19.3.0.0.jar")
- include("aliyun-sdk-oss-3.16.0.jar")
- include("spark-sql-kafka-0-10_$scalaVersion-${sparkVersion}.jar")
- if (scalaVersion.startsWith('2.11')) {
- include("kudu-spark2_$scalaVersion-1.9.0.jar")
- } else if (sparkVersion.startsWith("3")) {
- include("kudu-spark3_$scalaVersion-1.15.0.jar")
- }
- include("circe-yaml_$scalaVersion-0.11.0-M1.jar")
- include("circe-core_$scalaVersion-0.12.0-M3.jar")
- include("circe-generic_$scalaVersion-0.12.0-M3.jar")
- include("circe-generic-extras_$scalaVersion-0.12.0-M3.jar")
- include("cats-core_$scalaVersion-2.0.0-M4.jar")
- include("cats-kernel_$scalaVersion-2.0.0-M4.jar")
- include("postgresql-42.2.14.jar")
- include("jsch-0.1.55.jar")
- include("ImpalaJDBC41-2.6.3.jar")
- include("mssql-jdbc-9.4.0.jre8.jar")
- include("jtds-1.3.1.jar")
- include("jdbc-4.10.14.jar") // informix
- include("pmml-evaluator-extension-1.5.3.jar")
- include("pmml-evaluator-1.5.3.jar")
- include("pmml-model-1.5.3.jar")
- include("pmml-agent-1.5.3.jar")
- include("picocli-4.6.3.jar")
- include("json-path-2.6.0.jar")
- include("mail-1.4.7.jar")
-
- // spark-excel start
- include("xml-resolver-1.2.jar")
- include("xml-apis-1.4.01.jar")
- include("slf4j-api-1.7.25.jar")
- include("joda-convert-2.0.1.jar")
- include("xmlbeans-3.1.0.jar")
- include("poi-4.1.0.jar")
- include("poi-ooxml-4.1.0.jar")
- include("poi-ooxml-schemas-4.1.0.jar")
- include("commons-math3-3.6.1.jar")
- include("commons-collections4-4.3.jar")
- include("commons-compress-1.19.jar")
- include("joda-time-2.9.9.jar")
- include("java-cup-10k.jar")
- include("commons-net-3.1.jar")
- include("commons-codec-1.12.jar")
- include("org.eclipse.wst.xml.xpath2.processor-2.1.100.jar")
- include("xerces2-xsd11-2.11.1.jar")
- include("xlsx-streamer-2.1.0.jar")
- include("icu4j-4.6.jar")
- include("curvesapi-1.06.jar")
- include("spark-excel_$scalaVersion-0.13.1.jar")
- include("picocli-4.6.3.jar")
- // spark-excel end
- include("lift-json_$scalaVersion-3.4.3")
-
- if (sparkVersion.startsWith("2.3")) {
- include("spark-bigquery-with-dependencies_$scalaVersion-0.26.0.jar")
- } else if (sparkVersion.startsWith("2.4")) {
- include("spark-2.4-bigquery-0.26.0-preview.jar")
- } else if (sparkVersion.startsWith("3.1") || sparkVersion.startsWith("3.2")) {
- include("spark-3.1-bigquery-0.26.0-preview.jar")
- } else if (sparkVersion.startsWith("3.0")) {
- include("spark-bigquery-with-dependencies_$scalaVersion-0.26.0.jar")
- }
-}
-
-task buildJars(type: Copy, dependsOn: [":spark:clearJar", ":spark:shadowJar", ":spark:slimJar", ":spark:copyDependencies"])
-
-task sourceJar(type: Jar) {
- classifier = 'sources'
- from sourceSets.main.allSource
-}
-
-scalastyle {
- config = file("${rootDir}/scalastyle_config.xml") // path to scalastyle config xml file
- failOnWarning = true
- sourceSets {
- test {
- skip = true
- }
- }
-}
-
-sourceSets {
- main {
- resources {
- srcDirs += [
- project(':core').sourceSets.main.resources
- ]
- }
- }
-
- test {
- resources {
- srcDirs += [
- project(':data-modeling').sourceSets.test.resources
- ]
- }
- //runtimeClasspath -= files(sourceSets.main.output.resourcesDir)
- }
-}
-
-task hiveSuit(type: Test) {
- suite 'com.github.sharpdata.sharpetl.spark.end2end.hive.HiveSuitExecutor'
-}
-
-task deltaSuit(type: Test) {
- suite 'com.github.sharpdata.sharpetl.spark.end2end.delta.DeltaSuitExecutor'
-}
-
-tasks.named('test') {
- dependsOn(deltaSuit)
- dependsOn(hiveSuit)
-}
-
-if (scalaVersion.startsWith('2.11')) {
- if (sparkVersion.startsWith("2.3")) {
- sourceSets {
- main {
- scala {
- srcDirs = ['src/main/scala', 'src/main/spark_2.3_scala_211']
- }
- }
- test {
- scala {
- srcDirs = ['src/test/scala']
- }
- }
- }
- } else if (sparkVersion.startsWith("2.4")) {
- sourceSets {
- main {
- scala {
- srcDirs = ['src/main/scala', 'src/main/spark_2.4_scala_211']
- }
- }
- test {
- scala {
- srcDirs = ['src/test/scala']
- }
- }
- }
- }
-} else {
- if (sparkVersion.startsWith("2.4")) {
- sourceSets {
- main {
- scala {
- srcDirs = ['src/main/scala', 'src/main/spark_2.4_scala_212']
- }
- }
- test {
- scala {
- srcDirs = ['src/test/scala']
- }
- }
- }
- } else if (sparkVersion.startsWith("3.1") || sparkVersion.startsWith("3.0")) {
- sourceSets {
- main {
- scala {
- srcDirs = ['src/main/scala', 'src/main/spark_3.1_scala_212']
- }
- }
- test {
- scala {
- srcDirs = ['src/test/scala']
- }
- }
- }
- } else if (sparkVersion.startsWith("3.2") || sparkVersion.startsWith("3.3") || sparkVersion.startsWith("3.4")|| sparkVersion.startsWith("3.5")) {
- sourceSets {
- main {
- scala {
- srcDirs = ['src/main/scala', 'src/main/spark_3.2_scala_212']
- }
- }
- test {
- scala {
- srcDirs = ['src/test/scala']
- }
- }
- }
- }
-}
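The Delta Lake branch above appears twice in this build script (once for compilation, once for the shadow jar) and keys the artifact off the Spark version prefix; Delta 3.x renamed the artifact from delta-core to delta-spark. A standalone Scala sketch of that mapping, for reference only:

    // Maps a Spark version prefix to the matching Delta Lake coordinate.
    def deltaCoordinate(sparkVersion: String, scalaVersion: String): Option[String] = {
      val byPrefix: Seq[(String, (String, String))] = Seq(
        "3.5" -> ("delta-spark" -> "3.0.0"),
        "3.4" -> ("delta-core" -> "2.4.0"),
        "3.3" -> ("delta-core" -> "2.3.0"),
        "3.2" -> ("delta-core" -> "2.0.2"),
        "3.1" -> ("delta-core" -> "1.0.1"),
        "3.0" -> ("delta-core" -> "0.8.0"),
        "2"   -> ("delta-core" -> "0.6.1"))
      byPrefix.collectFirst {
        case (prefix, (artifact, version)) if sparkVersion.startsWith(prefix) =>
          s"io.delta:${artifact}_$scalaVersion:$version"
      }
    }
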
diff --git a/spark/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin b/spark/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin
deleted file mode 100644
index 3d7b6a7..0000000
--- a/spark/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin
+++ /dev/null
@@ -1 +0,0 @@
-com.github.sharpdata.sharpetl.spark.extra.flyway.hive.HiveDatabaseType
\ No newline at end of file
diff --git a/spark/src/main/resources/application.properties b/spark/src/main/resources/application.properties
deleted file mode 100644
index da55dc5..0000000
--- a/spark/src/main/resources/application.properties
+++ /dev/null
@@ -1,70 +0,0 @@
-etl.workflow.path=tasks
-etl.default.jobId.column=job_id
-etl.default.jobTime.column=job_time
-flyway.driver=com.mysql.cj.jdbc.Driver
-flyway.url=jdbc:mysql://localhost/sharp_etl?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&autoReconnect=true
-flyway.username=root
-flyway.password=root
-spark.default.spark.sql.cbo.enabled=true
-spark.default.spark.sql.adaptive.enabled=true
-spark.default.spark.sql.adaptive.logLevel=info
-spark.default.spark.sql.adaptive.advisoryPartitionSizeInBytes=128m
-spark.default.spark.sql.adaptive.coalescePartitions.enabled=true
-spark.default.spark.sql.adaptive.coalescePartitions.minPartitionNum=1
-spark.default.spark.sql.adaptive.fetchShuffleBlocksInBatch=true
-spark.default.spark.sql.adaptive.localShuffleReader.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionFactor=5
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes=400m
-spark.default.spark.sql.adaptive.nonEmptyPartitionRatioForBroadcastJoin=0.2
-spark.default.spark.sql.autoBroadcastJoinThreshold=-1
-spark.default.spark.sql.adaptive.shuffle.targetPostShuffleInputSize=134217728
-spark.default.hive.exec.dynamic.partition=true
-spark.default.hive.exec.dynamic.partition.mode=nonstrict
-spark.default.spark.sql.sources.partitionOverwriteMode=dynamic
-spark.default.spark.serializer=org.apache.spark.serializer.KryoSerializer
-spark.default.spark.kryoserializer.buffer.max=128m
-spark.default.spark.sql.crossJoin.enabled=true
-spark.default.spark.driver.cores=1
-spark.default.spark.driver.memory=1g
-spark.default.spark.driver.memoryOverhead=1g
-spark.default.spark.driver.maxResultSize=0
-spark.default.spark.executor.cores=2
-spark.default.spark.executor.memory=4g
-spark.default.spark.executor.memoryOverhead=2g
-spark.default.spark.dynamicAllocation.enabled=true
-spark.default.spark.shuffle.service.enabled=true
-spark.default.spark.dynamicAllocation.minExecutors=1
-spark.default.spark.dynamicAllocation.maxExecutors=4
-spark.default.spark.streaming.stopGracefullyOnShutdown=true
-spark.default.spark.streaming.backpressure.enabled=true
-spark.default.spark.streaming.kafka.maxRatePerPartition=100000
-
-local_test.mysql.url=jdbc:mysql://localhost:2333/local_test
-local_test.mysql.user=root
-local_test.mysql.password=root
-local_test.mysql.driver=com.mysql.cj.jdbc.Driver
-local_test.mysql.fetchsize=1000
-
-sales.postgres.url=jdbc:postgresql://localhost:5432/postgres?stringtype=unspecified
-sales.postgres.user=postgres
-sales.postgres.password=postgres
-sales.postgres.driver=org.postgresql.Driver
-sales.postgres.fetchsize=10
-
-postgres.postgres.url=jdbc:postgresql://localhost:5432/postgres?stringtype=unspecified
-postgres.postgres.user=postgres
-postgres.postgres.password=postgres
-postgres.postgres.driver=org.postgresql.Driver
-postgres.postgres.fetchsize=10
-
-sysmaster.informix.url=jdbc:informix-sqli://localhost:9088/sysmaster:INFORMIXSERVER=informix;DELIMIDENT=Y
-sysmaster.informix.user=informix
-sysmaster.informix.password=in4mix
-sysmaster.informix.driver=com.informix.jdbc.IfxDriver
-sysmaster.informix.fetchsize=100
-
-kafka.producer.kafka.bootstrap.servers=localhost:9092
-
-kafka.consumer.kafka.bootstrap.servers=localhost:9092
-kafka.consumer.startingOffsets=earliest
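The connection settings above follow a <configPrefix>.<datasource-type>.<key> scheme (e.g. sales.postgres.url), so a step's configPrefix selects one block of properties. A hedged Scala 2.13 sketch of that lookup; the function name and shape are assumptions, not the project's actual loader:

    import java.util.Properties
    import scala.jdk.CollectionConverters._

    // Returns the entries under "<prefix>.<dsType>." with the prefix stripped,
    // e.g. connectionConfig(props, "sales", "postgres")("url").
    def connectionConfig(props: Properties, prefix: String, dsType: String): Map[String, String] = {
      val keyPrefix = s"$prefix.$dsType."
      props.asScala.collect {
        case (k, v) if k.startsWith(keyPrefix) => k.stripPrefix(keyPrefix) -> v
      }.toMap
    }
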
diff --git a/spark/src/main/resources/log4j-driver.properties b/spark/src/main/resources/log4j-driver.properties
deleted file mode 100644
index 3052921..0000000
--- a/spark/src/main/resources/log4j-driver.properties
+++ /dev/null
@@ -1,58 +0,0 @@
-# Dedicated log output for ETLLogger
-log4j.logger.ETLLogger=INFO, ETLLogger, infoRollingFile
-log4j.additivity.ETLLogger=false
-log4j.appender.ETLLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.ETLLogger.File=${spark.yarn.app.container.log.dir}/ETLLogger.log
-log4j.appender.ETLLogger.Append=true
-log4j.appender.ETLLogger.MaxFileSize=16MB
-log4j.appender.ETLLogger.MaxBackupIndex=3
-log4j.appender.ETLLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.ETLLogger.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p - %m%n
-log4j.appender.ETLLogger.Encoding=UTF-8
-
-# MetricLogger
-log4j.logger.MetricLogger=INFO, MetricLogger, infoRollingFile
-log4j.additivity.MetricLogger=false
-log4j.appender.MetricLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.MetricLogger.File=${spark.yarn.app.container.log.dir}/MetricLogger.log
-log4j.appender.MetricLogger.Append=true
-log4j.appender.MetricLogger.MaxFileSize=16MB
-log4j.appender.MetricLogger.MaxBackupIndex=3
-log4j.appender.MetricLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.MetricLogger.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.MetricLogger.Encoding=UTF-8
-
-# Root logger
-log4j.rootLogger=WARN, infoRollingFile, console
-
-# Console output
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.out
-log4j.appender.console.Threshold=INFO
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.console.Encoding=UTF-8
-
-
-# INFO-level rolling file log output
-log4j.appender.infoRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.infoRollingFile.File=${spark.yarn.app.container.log.dir}/info.log
-log4j.appender.infoRollingFile.Threshold=INFO
-log4j.appender.infoRollingFile.Append=true
-log4j.appender.infoRollingFile.MaxFileSize=16MB
-log4j.appender.infoRollingFile.MaxBackupIndex=3
-log4j.appender.infoRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.infoRollingFile.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.infoRollingFile.Encoding=UTF-8
-
-shell.log.level=WARN
-log4j.logger.org.apache.ibatis=WARN
-log4j.logger.org.spark-project.jetty=WARN
-log4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-log4j.logger.org.apache.parquet=ERROR
-log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
-log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
-log4j.logger.org.apache.spark.repl.Main=${shell.log.level}
-log4j.logger.org.apache.spark.api.python.PythonGatewayServer=${shell.log.level}
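With this configuration, application code only needs to request the logger by name; additivity=false keeps ETLLogger and MetricLogger events out of the root logger's appenders. Minimal log4j 1.x usage:

    import org.apache.log4j.Logger

    object EtlLogging {
      // Resolves the dedicated logger configured above; its events go to
      // ETLLogger.log and info.log, not to the root console appender.
      val etlLogger: Logger = Logger.getLogger("ETLLogger")

      def main(args: Array[String]): Unit =
        etlLogger.info("workflow step finished")
    }
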
diff --git a/spark/src/main/resources/log4j-executor.properties b/spark/src/main/resources/log4j-executor.properties
deleted file mode 100644
index dfa7ea8..0000000
--- a/spark/src/main/resources/log4j-executor.properties
+++ /dev/null
@@ -1,46 +0,0 @@
-# Dedicated log output for ETLLogger
-log4j.logger.ETLLogger=INFO, ETLLogger, infoRollingFile
-log4j.additivity.ETLLogger=true
-log4j.appender.ETLLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.ETLLogger.File=${spark.yarn.app.container.log.dir}/ETLLogger.log
-log4j.appender.ETLLogger.Append=true
-log4j.appender.ETLLogger.MaxFileSize=16MB
-log4j.appender.ETLLogger.MaxBackupIndex=3
-log4j.appender.ETLLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.ETLLogger.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p - %m%n
-log4j.appender.ETLLogger.Encoding=UTF-8
-
-# Root logger
-log4j.rootLogger=WARN, infoRollingFile, console
-
-# Console output
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.out
-log4j.appender.console.Threshold=INFO
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.console.Encoding=UTF-8
-
-
-# INFO-level rolling file log output
-log4j.appender.infoRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.infoRollingFile.File=${spark.yarn.app.container.log.dir}/info.log
-log4j.appender.infoRollingFile.Threshold=INFO
-log4j.appender.infoRollingFile.Append=true
-log4j.appender.infoRollingFile.MaxFileSize=16MB
-log4j.appender.infoRollingFile.MaxBackupIndex=3
-log4j.appender.infoRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.infoRollingFile.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.infoRollingFile.Encoding=UTF-8
-
-shell.log.level=WARN
-log4j.logger.org.apache.ibatis=WARN
-log4j.logger.org.spark-project.jetty=WARN
-log4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-log4j.logger.org.apache.parquet=ERROR
-log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
-log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
-log4j.logger.org.apache.spark.repl.Main=${shell.log.level}
-log4j.logger.org.apache.spark.api.python.PythonGatewayServer=${shell.log.level}
diff --git a/spark/src/main/resources/log4j.properties b/spark/src/main/resources/log4j.properties
deleted file mode 100644
index cb21f94..0000000
--- a/spark/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,57 +0,0 @@
-# Dedicated log output for ETLLogger
-log4j.logger.ETLLogger=DEBUG, console, ETLLogger, infoRollingFile
-log4j.additivity.ETLLogger=false
-log4j.appender.ETLLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.ETLLogger.File=./logs/ETLLogger.log
-log4j.appender.ETLLogger.Append=true
-log4j.appender.ETLLogger.MaxFileSize=16MB
-log4j.appender.ETLLogger.MaxBackupIndex=3
-log4j.appender.ETLLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.ETLLogger.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p - %m%n
-log4j.appender.ETLLogger.Encoding=UTF-8
-
-# MetricLogger
-log4j.logger.MetricLogger=TRACE, console, MetricLogger, infoRollingFile
-log4j.additivity.MetricLogger=false
-log4j.appender.MetricLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.MetricLogger.File=./logs/MetricLogger.log
-log4j.appender.MetricLogger.Append=true
-log4j.appender.MetricLogger.MaxFileSize=16MB
-log4j.appender.MetricLogger.MaxBackupIndex=3
-log4j.appender.MetricLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.MetricLogger.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.MetricLogger.Encoding=UTF-8
-
-# Root logger
-log4j.rootLogger=WARN, infoRollingFile, console
-
-# Console output
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.out
-log4j.appender.console.Threshold=INFO
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.console.Encoding=UTF-8
-
-# INFO-level rolling file log output
-log4j.appender.infoRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.infoRollingFile.File=./logs/info.log
-log4j.appender.infoRollingFile.Threshold=INFO
-log4j.appender.infoRollingFile.Append=true
-log4j.appender.infoRollingFile.MaxFileSize=16MB
-log4j.appender.infoRollingFile.MaxBackupIndex=3
-log4j.appender.infoRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.infoRollingFile.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.infoRollingFile.Encoding=UTF-8
-
-shell.log.level=WARN
-log4j.logger.org.apache.ibatis=WARN
-log4j.logger.org.spark-project.jetty=WARN
-log4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-log4j.logger.org.apache.parquet=ERROR
-log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
-log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
-log4j.logger.org.apache.spark.repl.Main=${shell.log.level}
-log4j.logger.org.apache.spark.api.python.PythonGatewayServer=${shell.log.level}
diff --git a/spark/src/main/resources/mybatis-config.xml b/spark/src/main/resources/mybatis-config.xml
deleted file mode 100644
index dfaa67d..0000000
--- a/spark/src/main/resources/mybatis-config.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<!-- mybatis-config.xml: XML markup not recoverable (stripped during extraction) -->
diff --git a/spark/src/main/resources/quality-check.yaml b/spark/src/main/resources/quality-check.yaml
deleted file mode 100644
index 94c707d..0000000
--- a/spark/src/main/resources/quality-check.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-- dataCheckType: power null check
- rule: powerNullCheck($column)
- errorType: error
-- dataCheckType: null check
- rule: $column IS NULL
- errorType: error
-- dataCheckType: duplicated check
- rule: UDR.com.github.sharpdata.sharpetl.core.quality.udr.DuplicatedCheck
- errorType: warn
-- dataCheckType: mismatch dim check
- rule: $column = '-1'
- errorType: warn
\ No newline at end of file
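Each entry above pairs a rule template with a severity: $column is substituted per checked column, while UDR.-prefixed rules name a user-defined check class instead of an inline predicate. A hypothetical Scala sketch of that expansion (names are illustrative, not the engine's API):

    case class QualityRule(dataCheckType: String, rule: String, errorType: String)

    // Left: fully qualified class name of a user-defined rule (UDR);
    // Right: an inline SQL predicate with $column substituted.
    def toPredicate(rule: QualityRule, column: String): Either[String, String] =
      if (rule.rule.startsWith("UDR."))
        Left(rule.rule.stripPrefix("UDR."))
      else
        Right(rule.rule.replace("$column", s"`$column`"))

    // toPredicate(QualityRule("null check", "$column IS NULL", "error"), "user_id")
    //   == Right("`user_id` IS NULL")
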
diff --git a/spark/src/main/resources/streaming-log4j-driver.properties b/spark/src/main/resources/streaming-log4j-driver.properties
deleted file mode 100644
index dc5dd8c..0000000
--- a/spark/src/main/resources/streaming-log4j-driver.properties
+++ /dev/null
@@ -1,72 +0,0 @@
-# Dedicated log output for ETLLogger
-log4j.logger.ETLLogger=INFO, ETLLogger
-log4j.additivity.ETLLogger=true
-log4j.appender.ETLLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.ETLLogger.File=${spark.yarn.app.container.log.dir}/ETLLogger.log
-log4j.appender.ETLLogger.Append=true
-log4j.appender.ETLLogger.MaxFileSize=16MB
-log4j.appender.ETLLogger.MaxBackupIndex=3
-log4j.appender.ETLLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.ETLLogger.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.ETLLogger.encoding=UTF-8
-# MetricLogger
-log4j.logger.MetricLogger=INFO, MetricLogger
-log4j.additivity.MetricLogger=true
-log4j.appender.MetricLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.MetricLogger.File=${spark.yarn.app.container.log.dir}/MetricLogger.log
-log4j.appender.MetricLogger.Append=true
-log4j.appender.MetricLogger.MaxFileSize=16MB
-log4j.appender.MetricLogger.MaxBackupIndex=3
-log4j.appender.MetricLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.MetricLogger.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.MetricLogger.encoding=UTF-8
-# Root logger
-log4j.rootLogger=INFO, infoRollingFile, stderrRollingFile, stdoutRollingFile, console
-# Console output
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.out
-log4j.appender.console.Threshold=INFO
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.console.Encoding=UTF-8
-# stdout rolling file output
-log4j.appender.stdoutRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.stdoutRollingFile.File=${spark.yarn.app.container.log.dir}/stdout
-log4j.appender.stdoutRollingFile.Threshold=OFF
-log4j.appender.stdoutRollingFile.Append=true
-log4j.appender.stdoutRollingFile.MaxFileSize=16MB
-log4j.appender.stdoutRollingFile.MaxBackupIndex=3
-log4j.appender.stdoutRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdoutRollingFile.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.stdoutRollingFile.encoding=UTF-8
-# stderr rolling file output
-log4j.appender.stderrRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.stderrRollingFile.File=${spark.yarn.app.container.log.dir}/stderr
-log4j.appender.stderrRollingFile.Threshold=OFF
-log4j.appender.stderrRollingFile.Append=true
-log4j.appender.stderrRollingFile.MaxFileSize=16MB
-log4j.appender.stderrRollingFile.MaxBackupIndex=3
-log4j.appender.stderrRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.stderrRollingFile.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.stderrRollingFile.encoding=UTF-8
-# INFO-level rolling file log output
-log4j.appender.infoRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.infoRollingFile.File=${spark.yarn.app.container.log.dir}/info.log
-log4j.appender.infoRollingFile.Threshold=INFO
-log4j.appender.infoRollingFile.Append=true
-log4j.appender.infoRollingFile.MaxFileSize=16MB
-log4j.appender.infoRollingFile.MaxBackupIndex=3
-log4j.appender.infoRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.infoRollingFile.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.infoRollingFile.encoding=UTF-8
-shell.log.level=WARN
-log4j.logger.org.spark-project.jetty=WARN
-log4j.logger.org.apache.ibatis=WARN
-log4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-log4j.logger.org.apache.parquet=ERROR
-log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
-log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
-log4j.logger.org.apache.spark.repl.Main=${shell.log.level}
-log4j.logger.org.apache.spark.api.python.PythonGatewayServer=${shell.log.level}
diff --git a/spark/src/main/resources/streaming-log4j-executor.properties b/spark/src/main/resources/streaming-log4j-executor.properties
deleted file mode 100644
index dc5dd8c..0000000
--- a/spark/src/main/resources/streaming-log4j-executor.properties
+++ /dev/null
@@ -1,72 +0,0 @@
-# Dedicated log output for ETLLogger
-log4j.logger.ETLLogger=INFO, ETLLogger
-log4j.additivity.ETLLogger=true
-log4j.appender.ETLLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.ETLLogger.File=${spark.yarn.app.container.log.dir}/ETLLogger.log
-log4j.appender.ETLLogger.Append=true
-log4j.appender.ETLLogger.MaxFileSize=16MB
-log4j.appender.ETLLogger.MaxBackupIndex=3
-log4j.appender.ETLLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.ETLLogger.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.ETLLogger.encoding=UTF-8
-# MetricLogger
-log4j.logger.MetricLogger=INFO, MetricLogger
-log4j.additivity.MetricLogger=true
-log4j.appender.MetricLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.MetricLogger.File=${spark.yarn.app.container.log.dir}/MetricLogger.log
-log4j.appender.MetricLogger.Append=true
-log4j.appender.MetricLogger.MaxFileSize=16MB
-log4j.appender.MetricLogger.MaxBackupIndex=3
-log4j.appender.MetricLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.MetricLogger.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.MetricLogger.encoding=UTF-8
-# Root logger
-log4j.rootLogger=INFO, infoRollingFile, stderrRollingFile, stdoutRollingFile, console
-# Console output
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.out
-log4j.appender.console.Threshold=INFO
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.console.Encoding=UTF-8
-# stdout rolling file output
-log4j.appender.stdoutRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.stdoutRollingFile.File=${spark.yarn.app.container.log.dir}/stdout
-log4j.appender.stdoutRollingFile.Threshold=OFF
-log4j.appender.stdoutRollingFile.Append=true
-log4j.appender.stdoutRollingFile.MaxFileSize=16MB
-log4j.appender.stdoutRollingFile.MaxBackupIndex=3
-log4j.appender.stdoutRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdoutRollingFile.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.stdoutRollingFile.encoding=UTF-8
-# stderr rolling file output
-log4j.appender.stderrRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.stderrRollingFile.File=${spark.yarn.app.container.log.dir}/stderr
-log4j.appender.stderrRollingFile.Threshold=OFF
-log4j.appender.stderrRollingFile.Append=true
-log4j.appender.stderrRollingFile.MaxFileSize=16MB
-log4j.appender.stderrRollingFile.MaxBackupIndex=3
-log4j.appender.stderrRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.stderrRollingFile.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.stderrRollingFile.encoding=UTF-8
-# INFO-level rolling file log output
-log4j.appender.infoRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.infoRollingFile.File=${spark.yarn.app.container.log.dir}/info.log
-log4j.appender.infoRollingFile.Threshold=INFO
-log4j.appender.infoRollingFile.Append=true
-log4j.appender.infoRollingFile.MaxFileSize=16MB
-log4j.appender.infoRollingFile.MaxBackupIndex=3
-log4j.appender.infoRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.infoRollingFile.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p [%c] - %m%n
-log4j.appender.infoRollingFile.encoding=UTF-8
-shell.log.level=WARN
-log4j.logger.org.spark-project.jetty=WARN
-log4j.logger.org.apache.ibatis=WARN
-log4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-log4j.logger.org.apache.parquet=ERROR
-log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
-log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
-log4j.logger.org.apache.spark.repl.Main=${shell.log.level}
-log4j.logger.org.apache.spark.api.python.PythonGatewayServer=${shell.log.level}
diff --git a/spark/src/main/resources/tasks/batch_kafka_test.sql b/spark/src/main/resources/tasks/batch_kafka_test.sql
deleted file mode 100755
index 58b09f0..0000000
--- a/spark/src/main/resources/tasks/batch_kafka_test.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- workflow=batch_kafka_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=batch_kafka
--- topics=test
--- groupId=0
--- schemaDDL=`id` STRING,`name` STRING
--- target=console
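These task files share one convention: every "-- key=value" comment line is metadata (a workflow header first, then one block per step), and anything else is the step's SQL body. A REPL-pasteable Scala sketch of the header parsing, illustrative only:

    // Extracts key=value options from "-- key=value" comment lines.
    def parseOptions(lines: Seq[String]): Map[String, String] =
      lines.iterator
        .map(_.trim)
        .filter(l => l.startsWith("--") && l.contains("="))
        .map { l =>
          val Array(k, v) = l.stripPrefix("--").split("=", 2)
          k.trim -> v.trim
        }
        .toMap

    // parseOptions(Seq("-- workflow=batch_kafka_test", "-- period=1440"))
    //   == Map("workflow" -> "batch_kafka_test", "period" -> "1440")
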
diff --git a/spark/src/main/resources/tasks/demo/do_nothing.sql b/spark/src/main/resources/tasks/demo/do_nothing.sql
deleted file mode 100644
index e5bf244..0000000
--- a/spark/src/main/resources/tasks/demo/do_nothing.sql
+++ /dev/null
@@ -1,10 +0,0 @@
--- workflow=do_nothing
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=temp
--- target=temp
--- tableName=do_nothing_table
-SELECT 'success';
diff --git a/spark/src/main/resources/tasks/demo/excel_test.sql b/spark/src/main/resources/tasks/demo/excel_test.sql
deleted file mode 100644
index 42dce66..0000000
--- a/spark/src/main/resources/tasks/demo/excel_test.sql
+++ /dev/null
@@ -1,39 +0,0 @@
--- workflow=excel_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=excel
--- onlyOneName=true
--- configPrefix=cp_dmp
--- fileNamePattern=()(\d{4}年\d{2}月国代任务数据\.xlsx)()
--- dataAddress=A1
--- target=temp
--- tableName=temp
-
--- step=2
--- source=temp
--- target=variables
-select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyyMMddHH'), 'yyyyMMdd') as `DATE_END`;
-
--- step=3
--- source=temp
--- target=console
-select `Level` as level,
- `Spu_code` as spu_code,
- `Sku_code` as sku_code,
- `系列` as series,
- `型号` as model,
- `制式` as standard,
- `SI任务` as si_task,
- `SO任务` as so_task,
- `市场健康度-A类-目标值` as market_health_a_target_value,
- `市场健康度-A类-底线值` as market_health_a_bottom_value,
- `市场健康度-B类-目标值` as market_health_b_target_value,
- `市场健康度-B类-底线值` as market_health_b_bottom_value,
- `市场健康度-二级市场管控-目标` as market_health_2_target_value,
- `file_name` as file_name,
- ${TASK_ID} as task_id,
- ${DATE_END} as ods_insert_date
-from temp;
diff --git a/spark/src/main/resources/tasks/demo/hdfs_to_ftp_test.sql b/spark/src/main/resources/tasks/demo/hdfs_to_ftp_test.sql
deleted file mode 100644
index 0a06dfd..0000000
--- a/spark/src/main/resources/tasks/demo/hdfs_to_ftp_test.sql
+++ /dev/null
@@ -1,13 +0,0 @@
--- workflow=hdfs_to_ftp_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=hdfs
--- configPrefix=dw
--- fileNamePattern=()(dw_card_3m_event_.+\.gz)()
--- target=ftp
--- configPrefix=dw
--- fileDir=/hairui/output/dw
--- writeMode=overwrite
diff --git a/spark/src/main/resources/tasks/demo/json_to_hdfs.sql b/spark/src/main/resources/tasks/demo/json_to_hdfs.sql
deleted file mode 100644
index bd33ba1..0000000
--- a/spark/src/main/resources/tasks/demo/json_to_hdfs.sql
+++ /dev/null
@@ -1,40 +0,0 @@
--- workflow=json_to_hdfs
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=ftp
--- configPrefix=ehr
--- fileNamePattern=()(Org_\d{14}\.json)()
--- target=hdfs
--- configPrefix=ehr
--- writeMode=overwrite
-
--- step=2
--- source=json
--- configPrefix=ehr
--- fileNamePattern=()(Org_\d{14}\.json)()
--- multiline=false
--- target=temp
--- tableName=ehr_org
-
--- step=3
--- source=temp
--- target=hdfs
--- separator=\t
--- filePath=/tmp/load/csv/ods_ehr_org.csv
-select line.dataStatus as data_status,
- line.input_field1 as input_field1,
- line.id as id,
- line.name as name,
- line.director_id as director_id,
- line.idCard as id_card,
- line.parent_id as parent_id,
- line.attribute as attribute,
- line.path_depth as path_depth,
- ${TASK_ID} as task_id,
- file_name,
- ${DATA_RANGE_END} as ods_insert_date
-from ehr_org
- LATERAL VIEW explode(data) line AS line;
\ No newline at end of file
diff --git a/spark/src/main/resources/tasks/demo/mysql_append.sql b/spark/src/main/resources/tasks/demo/mysql_append.sql
deleted file mode 100644
index 45d52d2..0000000
--- a/spark/src/main/resources/tasks/demo/mysql_append.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- workflow=mysql_append
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=hive
--- target=mysql
--- dbName=cp_dmp_dm
--- tableName=ADS_SALES_SO
--- writeMode=append
-SELECT LOCATION,
- PROD_ID,
- COMPANY_TYPE,
- SO_NUM,
- `DATE`,
- WEEK_TO_DAY,
- MONTH_TO_DAY,
- LAST_WEEK,
- SAME_DAY_LAST_YEAR_SO,
- YESTERDAY_SO,
- LAST_7_DAY,
- LAST_14_DAY,
- LAST_21_DAY,
- AVG_WEEK_DAILY,
- AVG_MONTH_DAILY,
- LAST_MONTH_AVG_MONTH_DAILY,
- DM_INSERT_DATE_HOUR
-FROM DM_SALES_SO
-WHERE DM_INSERT_DATE_HOUR = '2021012702'
- AND `date` >= '2020-08-01' and `date` < '2020-10-01'
-limit 10;
diff --git a/spark/src/main/resources/tasks/demo/mysql_upsert.sql b/spark/src/main/resources/tasks/demo/mysql_upsert.sql
deleted file mode 100644
index 71b7b25..0000000
--- a/spark/src/main/resources/tasks/demo/mysql_upsert.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- workflow=mysql_upsert
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=hive
--- target=mysql
--- dbName=cp_dmp_dm
--- tableName=ADS_SALES_SO
--- writeMode=upsert
-SELECT LOCATION,
- PROD_ID,
- COMPANY_TYPE,
- SO_NUM,
- `DATE`,
- WEEK_TO_DAY,
- MONTH_TO_DAY,
- LAST_WEEK,
- SAME_DAY_LAST_YEAR_SO,
- YESTERDAY_SO,
- LAST_7_DAY,
- LAST_14_DAY,
- LAST_21_DAY,
- AVG_WEEK_DAILY,
- AVG_MONTH_DAILY,
- LAST_MONTH_AVG_MONTH_DAILY,
- DM_INSERT_DATE_HOUR
-FROM DM_SALES_SO
-WHERE DM_INSERT_DATE_HOUR = '2021012702'
- AND `date` >= '2020-08-01' and `date` < '2020-10-01'
-limit 10;
diff --git a/spark/src/main/resources/tasks/demo/read_informix.sql b/spark/src/main/resources/tasks/demo/read_informix.sql
deleted file mode 100644
index ba99a53..0000000
--- a/spark/src/main/resources/tasks/demo/read_informix.sql
+++ /dev/null
@@ -1,41 +0,0 @@
--- workflow=read_informix
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=informix
--- dbName=sysmaster
--- tableName=online_order
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_fact_online_order
--- writeMode=append
-SELECT order_no AS order_no,
- user_id AS user_id,
- user_name AS user_name,
- order_total_amount AS order_total_amount,
- actual_amount AS actual_amount,
- post_amount AS post_amount,
- order_pay_amount AS order_pay_amount,
- total_discount AS total_discount,
- pay_type AS pay_type,
- source_type AS source_type,
- order_status AS order_status,
- note AS note,
- confirm_status AS confirm_status,
- payment_time AS payment_time,
- delivery_time AS delivery_time,
- receive_time AS receive_time,
- comment_time AS comment_time,
- delivery_company AS delivery_company,
- delivery_code AS delivery_code,
- business_date AS business_date,
- return_flag AS return_flag,
- created_at AS created_at,
- updated_at AS updated_at,
- deleted_at AS deleted_at,
- '${JOB_ID}' AS job_id,
- CURRENT YEAR TO SECOND AS job_time
-FROM online_order
-WHERE business_date >= TO_DATE('${DATA_RANGE_START}') AND business_date < TO_DATE('${DATA_RANGE_END}');
\ No newline at end of file
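The closing predicate is the timewindow contract in miniature: each run covers the half-open interval [DATA_RANGE_START, DATA_RANGE_END), so consecutive windows tile the timeline with no overlap and no gaps, and period=1440 minutes yields one window per day. A sketch of the window arithmetic (assumed, not the scheduler's actual code):

    import java.time.LocalDateTime

    // The next window starts exactly where the previous one ended.
    def nextWindow(lastEnd: LocalDateTime, periodMinutes: Long): (LocalDateTime, LocalDateTime) =
      (lastEnd, lastEnd.plusMinutes(periodMinutes))
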
diff --git a/spark/src/main/resources/tasks/demo/sales.online_order.sql b/spark/src/main/resources/tasks/demo/sales.online_order.sql
deleted file mode 100644
index 801c183..0000000
--- a/spark/src/main/resources/tasks/demo/sales.online_order.sql
+++ /dev/null
@@ -1,41 +0,0 @@
--- workflow=sales.online_order
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.online_order
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_fact_online_order
--- writeMode=append
-SELECT "order_no" AS "order_no",
-"user_id" AS "user_id",
-"user_name" AS "user_name",
-"order_total_amount" AS "order_total_amount",
-"actual_amount" AS "actual_amount",
-"post_amount" AS "post_amount",
-"order_pay_amount" AS "order_pay_amount",
-"total_discount" AS "total_discount",
-"pay_type" AS "pay_type",
-"source_type" AS "source_type",
-"order_status" AS "order_status",
-"note" AS "note",
-"confirm_status" AS "confirm_status",
-"payment_time" AS "payment_time",
-"delivery_time" AS "delivery_time",
-"receive_time" AS "receive_time",
-"comment_time" AS "comment_time",
-"delivery_company" AS "delivery_company",
-"delivery_code" AS "delivery_code",
-"business_date" AS "business_date",
-"return_flag" AS "return_flag",
-"created_at" AS "created_at",
-"updated_at" AS "updated_at",
-"deleted_at" AS "deleted_at",
-'${JOB_ID}' AS "job_id",
-now() AS "job_time"
-FROM "postgres"."sales"."online_order"
-WHERE "business_date" >= '${DATA_RANGE_START}' AND "business_date" < '${DATA_RANGE_END}';
diff --git a/spark/src/main/resources/tasks/demo/sink_to_kafka_test.sql b/spark/src/main/resources/tasks/demo/sink_to_kafka_test.sql
deleted file mode 100644
index 8e07a70..0000000
--- a/spark/src/main/resources/tasks/demo/sink_to_kafka_test.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- workflow=sink_to_kafka_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=temp
--- target=batch_kafka
--- topics=sink-to-kafka-6
-with data as (select 'user_id_1' as `user_id`,
- 'user_name_1' as `user_name`
- union all
- select 'user_id_2' as `user_id`,
- 'user_name_2' as `user_name`)
-select *
-from data;
diff --git a/spark/src/main/resources/tasks/demo/test_rand_model.sql b/spark/src/main/resources/tasks/demo/test_rand_model.sql
deleted file mode 100644
index 8f3ae6d..0000000
--- a/spark/src/main/resources/tasks/demo/test_rand_model.sql
+++ /dev/null
@@ -1,407 +0,0 @@
--- workflow=test_rand_model
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=pmml
--- className=com.github.sharpdata.sharpetl.spark.extension.PmmlUDF
--- pmmlFileName=test_rand_model.pmml
--- target=udf
--- methodName=predict
--- udfName=predict
-
--- step=2
--- source=hive
--- target=temp
--- tableName=temp1
-select cast(1.0 as float) as x1,
- cast(1.0 as float) as x2,
- cast(1.0 as float) as x3,
- cast(1.0 as float) as x4,
- cast(1.0 as float) as x5,
- cast(1.0 as float) as x6,
- cast(1.0 as float) as x7,
- cast(1.0 as float) as x8,
- cast(1.0 as float) as x9,
- cast(1.0 as float) as x10,
- cast(1.0 as float) as x11,
- cast(1.0 as float) as x12,
- cast(1.0 as float) as x13,
- cast(1.0 as float) as x14,
- cast(1.0 as float) as x15,
- cast(1.0 as float) as x16,
- cast(1.0 as float) as x17,
- cast(1.0 as float) as x18,
- cast(1.0 as float) as x19,
- cast(1.0 as float) as x20,
- cast(1.0 as float) as x21,
- cast(1.0 as float) as x22,
- cast(1.0 as float) as x23,
- cast(1.0 as float) as x25,
- cast(1.0 as float) as x26,
- cast(1.0 as float) as x27,
- cast(1.0 as float) as x28,
- cast(1.0 as float) as x29,
- cast(1.0 as float) as x30,
- cast(1.0 as float) as x31,
- cast(1.0 as float) as x32,
- cast(1.0 as float) as x34,
- cast(1.0 as float) as x35,
- cast(1.0 as float) as x36,
- cast(1.0 as float) as x37,
- cast(1.0 as float) as x38,
- cast(1.0 as float) as x39,
- cast(1.0 as float) as x40,
- cast(1.0 as float) as x41,
- cast(1.0 as float) as x42,
- cast(1.0 as float) as x43,
- cast(1.0 as float) as x44,
- cast(1.0 as float) as x45,
- cast(1.0 as float) as x46,
- cast(1.0 as float) as x47,
- cast(1.0 as float) as x48,
- cast(1.0 as float) as x49,
- cast(1.0 as float) as x50,
- cast(1.0 as float) as x51,
- cast(1.0 as float) as x52,
- cast(1.0 as float) as x53,
- cast(1.0 as float) as x54,
- cast(1.0 as float) as x55,
- cast(1.0 as float) as x56,
- cast(1.0 as float) as x58,
- cast(1.0 as float) as x59,
- cast(1.0 as float) as x60,
- cast(1.0 as float) as x61,
- cast(1.0 as float) as x62,
- cast(1.0 as float) as x63,
- cast(1.0 as float) as x65,
- cast(1.0 as float) as x66,
- cast(1.0 as float) as x67,
- cast(1.0 as float) as x68,
- cast(1.0 as float) as x69,
- cast(1.0 as float) as x70,
- cast(1.0 as float) as x71,
- cast(1.0 as float) as x72,
- cast(1.0 as float) as x73,
- cast(1.0 as float) as x74,
- cast(1.0 as float) as x75,
- cast(1.0 as float) as x76,
- cast(1.0 as float) as x77,
- cast(1.0 as float) as x78,
- cast(1.0 as float) as x79,
- cast(1.0 as float) as x80,
- cast(1.0 as float) as x81,
- cast(1.0 as float) as x85,
- cast(1.0 as float) as x86,
- cast(1.0 as float) as x87,
- cast(1.0 as float) as x88,
- cast(1.0 as float) as x89,
- cast(1.0 as float) as x90,
- cast(1.0 as float) as x91,
- cast(1.0 as float) as x92,
- cast(1.0 as float) as x93,
- cast(1.0 as float) as x94,
- cast(1.0 as float) as x95,
- cast(1.0 as float) as x96,
- cast(1.0 as float) as x97,
- cast(1.0 as float) as x98,
- cast(1.0 as float) as x99,
- cast(1.0 as float) as x100;
-
--- step=3
--- source=temp
--- target=temp
--- tableName=temp2
-select predict(
- struct(
- x1,
- x2,
- x3,
- x4,
- x5,
- x6,
- x7,
- x8,
- x9,
- x10,
- x11,
- x12,
- x13,
- x14,
- x15,
- x16,
- x17,
- x18,
- x19,
- x20,
- x21,
- x22,
- x23,
- x25,
- x26,
- x27,
- x28,
- x29,
- x30,
- x31,
- x32,
- x34,
- x35,
- x36,
- x37,
- x38,
- x39,
- x40,
- x41,
- x42,
- x43,
- x44,
- x45,
- x46,
- x47,
- x48,
- x49,
- x50,
- x51,
- x52,
- x53,
- x54,
- x55,
- x56,
- x58,
- x59,
- x60,
- x61,
- x62,
- x63,
- x65,
- x66,
- x67,
- x68,
- x69,
- x70,
- x71,
- x72,
- x73,
- x74,
- x75,
- x76,
- x77,
- x78,
- x79,
- x80,
- x81,
- x85,
- x86,
- x87,
- x88,
- x89,
- x90,
- x91,
- x92,
- x93,
- x94,
- x95,
- x96,
- x97,
- x98,
- x99,
- x100
- )
- ) as target,
- x1,
- x2,
- x3,
- x4,
- x5,
- x6,
- x7,
- x8,
- x9,
- x10,
- x11,
- x12,
- x13,
- x14,
- x15,
- x16,
- x17,
- x18,
- x19,
- x20,
- x21,
- x22,
- x23,
- x25,
- x26,
- x27,
- x28,
- x29,
- x30,
- x31,
- x32,
- x34,
- x35,
- x36,
- x37,
- x38,
- x39,
- x40,
- x41,
- x42,
- x43,
- x44,
- x45,
- x46,
- x47,
- x48,
- x49,
- x50,
- x51,
- x52,
- x53,
- x54,
- x55,
- x56,
- x58,
- x59,
- x60,
- x61,
- x62,
- x63,
- x65,
- x66,
- x67,
- x68,
- x69,
- x70,
- x71,
- x72,
- x73,
- x74,
- x75,
- x76,
- x77,
- x78,
- x79,
- x80,
- x81,
- x85,
- x86,
- x87,
- x88,
- x89,
- x90,
- x91,
- x92,
- x93,
- x94,
- x95,
- x96,
- x97,
- x98,
- x99,
- x100
-from temp1;
-
--- step=4
--- source=temp
--- target=console
-select cast(target['y'] as int) as target,
- cast(target['probability(0)'] as double) as probability_0,
- cast(target['probability(1)'] as double) as probability_1,
- x1,
- x2,
- x3,
- x4,
- x5,
- x6,
- x7,
- x8,
- x9,
- x10,
- x11,
- x12,
- x13,
- x14,
- x15,
- x16,
- x17,
- x18,
- x19,
- x20,
- x21,
- x22,
- x23,
- x25,
- x26,
- x27,
- x28,
- x29,
- x30,
- x31,
- x32,
- x34,
- x35,
- x36,
- x37,
- x38,
- x39,
- x40,
- x41,
- x42,
- x43,
- x44,
- x45,
- x46,
- x47,
- x48,
- x49,
- x50,
- x51,
- x52,
- x53,
- x54,
- x55,
- x56,
- x58,
- x59,
- x60,
- x61,
- x62,
- x63,
- x65,
- x66,
- x67,
- x68,
- x69,
- x70,
- x71,
- x72,
- x73,
- x74,
- x75,
- x76,
- x77,
- x78,
- x79,
- x80,
- x81,
- x85,
- x86,
- x87,
- x88,
- x89,
- x90,
- x91,
- x92,
- x93,
- x94,
- x95,
- x96,
- x97,
- x98,
- x99,
- x100
-from temp2;
\ No newline at end of file
diff --git a/spark/src/main/resources/tasks/hello_delta.sql b/spark/src/main/resources/tasks/hello_delta.sql
deleted file mode 100644
index 4e406d2..0000000
--- a/spark/src/main/resources/tasks/hello_delta.sql
+++ /dev/null
@@ -1,48 +0,0 @@
--- workflow=hello_delta
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=create database
--- target=delta_lake
-CREATE SCHEMA IF NOT EXISTS delta_db;
-
-
--- step=create table
--- target=delta_lake
-create or replace table delta_db.delta_tbl
-(
- id INT,
- name STRING
-) using delta;
-
-
--- step=insert some sample data
--- target=delta_lake
--- writeMode=execute
-insert into delta_db.delta_tbl
-values (1, "a1"),
- (2, "a2");
-
--- step=print data to console
--- source=delta_lake
--- dbName=delta_db
--- tableName=delta_tbl
--- target=console
-select * from delta_db.delta_tbl;
-
--- step=update sample data
--- target=delta_lake
--- writeMode=execute
-update delta_db.delta_tbl set name = 'a1_new' where id = 1;
-
--- step=delete sample data
--- target=delta_lake
--- writeMode=execute
-delete from delta_db.delta_tbl where id = 2;
-
--- step=print updated data to console
--- source=delta_lake
--- dbName=delta_db
--- tableName=delta_tbl
--- target=console
-select * from delta_db.delta_tbl;
\ No newline at end of file
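hello_delta.sql presumes a Spark session built with the Delta Lake extensions; without them, `create or replace table ... using delta` fails. A minimal sketch of that session wiring, assuming the delta-spark artifact is on the classpath (this is not the framework's actual session builder):

import org.apache.spark.sql.SparkSession

object DeltaSessionSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("hello-delta-sketch")
      .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
      .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")
      .getOrCreate()

    // the same statement sequence as the workflow above
    spark.sql("CREATE SCHEMA IF NOT EXISTS delta_db")
    spark.sql("create or replace table delta_db.delta_tbl (id INT, name STRING) using delta")
    spark.sql("insert into delta_db.delta_tbl values (1, 'a1'), (2, 'a2')")
    spark.sql("update delta_db.delta_tbl set name = 'a1_new' where id = 1")
    spark.sql("delete from delta_db.delta_tbl where id = 2")
    spark.sql("select * from delta_db.delta_tbl").show()
  }
}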
diff --git a/spark/src/main/resources/tasks/hello_world.sql b/spark/src/main/resources/tasks/hello_world.sql
deleted file mode 100644
index 82eb9fa..0000000
--- a/spark/src/main/resources/tasks/hello_world.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- workflow=hello_world
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=define variable
--- source=temp
--- target=variables
-
-SELECT 'RESULT' AS `OUTPUT_COL`;
-
--- step=print SUCCESS to console
--- source=temp
--- target=console
-
-SELECT 'SUCCESS' AS `${OUTPUT_COL}`;
\ No newline at end of file
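The first step writes its single-row result to target=variables, after which `${OUTPUT_COL}` is spliced into the next step's SQL. A toy sketch of that substitution idea (names are illustrative, not the framework's internals):

object VariablesSketch {
  // naive ${NAME} substitution over a step's SQL text
  def substitute(sql: String, variables: Map[String, String]): String =
    variables.foldLeft(sql) { case (acc, (k, v)) => acc.replace("${" + k + "}", v) }

  def main(args: Array[String]): Unit = {
    val vars = Map("OUTPUT_COL" -> "RESULT") // produced by: SELECT 'RESULT' AS `OUTPUT_COL`
    println(substitute("SELECT 'SUCCESS' AS `${OUTPUT_COL}`", vars))
    // prints: SELECT 'SUCCESS' AS `RESULT`
  }
}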
diff --git a/spark/src/main/resources/tasks/ods.t_fact_order_dwd.t_fact_order.sql b/spark/src/main/resources/tasks/ods.t_fact_order_dwd.t_fact_order.sql
deleted file mode 100644
index 3112fa5..0000000
--- a/spark/src/main/resources/tasks/ods.t_fact_order_dwd.t_fact_order.sql
+++ /dev/null
@@ -1,133 +0,0 @@
--- workflow=ods.t_fact_order_dwd.t_fact_order
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=ods.t_fact_order
--- target=temp
--- tableName=ods_t_fact_order__extracted
--- writeMode=overwrite
-select
- "order_id" as "order_id",
- "order_sn" as "order_sn",
- "product_code" as "product_code",
- "product_name" as "product_name",
- "product_version" as "product_version",
- "product_status" as "product_status",
- "user_code" as "user_code",
- "user_name" as "user_name",
- "user_age" as "user_age",
- "user_address" as "user_address",
- "product_count" as "product_count",
- "price" as "price",
- "discount" as "discount",
- "order_status" as "order_status",
- "order_create_time" as "order_create_time",
- "order_update_time" as "order_update_time",
- price - discount as "actual"
-from "postgres"."ods"."t_fact_order"
-where "job_id" = ${DATA_RANGE_START};
-
--- step=2
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcAutoCreateDimTransformer
--- methodName=transform
--- createDimMode=once
--- currentAndDimColumnsMapping={"order_create_time":"create_time","user_code":"user_code","user_name":"user_name","user_address":"user_address","user_age":"user_age"}
--- currentAndDimPrimaryMapping={"user_code":"user_code"}
--- currentBusinessCreateTime=order_create_time
--- dimDb=postgres
--- dimDbType=postgres
--- dimTable=dwd.t_dim_user
--- dimTableColumnsAndType={"create_time":"timestamp","user_code":"varchar(128)","user_name":"varchar(128)","user_address":"varchar(128)","user_age":"int"}
--- updateTable=ods_t_fact_order__extracted
--- transformerType=object
--- target=do_nothing
-
--- step=3
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_product
--- target=temp
--- tableName=postgres_dwd_t_dim_product__matched
--- writeMode=append
-select
- "id", "mid", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_product";
-
--- step=4
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_user
--- target=temp
--- tableName=postgres_dwd_t_dim_user__matched
--- writeMode=append
-select
- "user_id", "user_code", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_user";
-
--- step=5
--- source=temp
--- tableName=ods_t_fact_order__extracted
--- target=temp
--- tableName=ods_t_fact_order__joined
--- writeMode=append
-select
- `ods_t_fact_order__extracted`.*,
- case when `postgres_dwd_t_dim_product__matched`.`id` is null then '-1'
- else `postgres_dwd_t_dim_product__matched`.`id` end as `product_id`,
- case when `postgres_dwd_t_dim_user__matched`.`user_id` is null then '-1'
- else `postgres_dwd_t_dim_user__matched`.`user_id` end as `user_id`
-from `ods_t_fact_order__extracted`
-left join `postgres_dwd_t_dim_product__matched`
- on `ods_t_fact_order__extracted`.`product_code` = `postgres_dwd_t_dim_product__matched`.`mid`
- and `ods_t_fact_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_product__matched`.`start_time`
- and (`ods_t_fact_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_product__matched`.`end_time`
- or `postgres_dwd_t_dim_product__matched`.`end_time` is null)
-
-left join `postgres_dwd_t_dim_user__matched`
- on `ods_t_fact_order__extracted`.`user_code` = `postgres_dwd_t_dim_user__matched`.`user_code`
- and `ods_t_fact_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_user__matched`.`start_time`
- and (`ods_t_fact_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_user__matched`.`end_time`
- or `postgres_dwd_t_dim_user__matched`.`end_time` is null);
-
--- step=6
--- source=temp
--- tableName=ods_t_fact_order__joined
--- target=temp
--- tableName=ods_t_fact_order__target_selected
--- writeMode=overwrite
-select
- `order_id`,
- `order_sn`,
- `product_id`,
- `user_id`,
- `product_count`,
- `price`,
- `discount`,
- `order_status`,
- `order_create_time`,
- `order_update_time`,
- `actual`
-from `ods_t_fact_order__joined`;
-
--- step=7
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer
--- methodName=transform
--- businessCreateTime=order_create_time
--- businessUpdateTime=order_update_time
--- currentDb=postgres
--- currentDbType=postgres
--- currentTable=dwd.t_fact_order
--- currentTableColumnsAndType={"order_status":"varchar(128)","actual":"decimal(10,4)","order_create_time":"timestamp","user_id":"varchar(128)","product_count":"int","price":"decimal(10,4)","product_id":"varchar(128)","discount":"decimal(10,4)","order_update_time":"timestamp","order_id":"varchar(128)","order_sn":"varchar(128)"}
--- primaryFields=order_id
--- slowChanging=false
--- updateTable=ods_t_fact_order__target_selected
--- updateType=incremental
--- transformerType=object
--- target=do_nothing
-
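Steps 3-5 of this workflow implement a slowly-changing-dimension lookup: each fact row joins to the dimension version whose [start_time, end_time) interval covers the order's create time, and unmatched rows fall back to the surrogate key '-1'. A DataFrame sketch of the same validity-window join (column names follow the SQL above; this is not framework code):

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{coalesce, lit}

object Scd2JoinSketch {
  // fact: rows with user_code and order_create_time
  // dimUser: SCD2 table with user_id, user_code, start_time, end_time
  def joinToDimVersion(fact: DataFrame, dimUser: DataFrame): DataFrame =
    fact.join(
        dimUser,
        fact("user_code") === dimUser("user_code") &&
          fact("order_create_time") >= dimUser("start_time") &&
          (fact("order_create_time") < dimUser("end_time") || dimUser("end_time").isNull),
        "left")
      // the open-ended current version has end_time null; misses map to '-1'
      .withColumn("user_id", coalesce(dimUser("user_id"), lit("-1")))
}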
diff --git a/spark/src/main/resources/tasks/ods.t_order_dwd.t_fact_order.sql b/spark/src/main/resources/tasks/ods.t_order_dwd.t_fact_order.sql
deleted file mode 100644
index 1a4bea3..0000000
--- a/spark/src/main/resources/tasks/ods.t_order_dwd.t_fact_order.sql
+++ /dev/null
@@ -1,137 +0,0 @@
--- workflow=ods.t_order_dwd.t_fact_order
--- loadType=incremental
--- logDrivenType=upstream
--- upstream=ods__ods.t_order
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=ods.t_order
--- target=temp
--- tableName=ods_t_order__extracted
--- writeMode=overwrite
-select
- "order_sn" as "order_sn",
- "product_code" as "product_code",
- "product_name" as "product_name",
- "product_version" as "product_version",
- "product_status" as "product_status",
- "user_code" as "user_code",
- "user_name" as "user_name",
- "user_age" as "user_age",
- "user_address" as "user_address",
- "product_count" as "product_count",
- "price" as "price",
- "discount" as "discount",
- "order_status" as "order_status",
- "order_create_time" as "order_create_time",
- "order_update_time" as "order_update_time",
- price - discount as "actual"
-from "postgres"."ods"."t_order"
-where "job_id" = '${DATA_RANGE_START}'
-AND "user_code" = 'u1';
-
--- step=2
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcAutoCreateDimTransformer
--- methodName=transform
--- createDimMode=once
--- currentAndDimColumnsMapping={"order_create_time":"create_time","user_code":"user_info_code","user_name":"user_name","user_age":"user_age","user_address":"user_address"}
--- currentAndDimPrimaryMapping={"user_code":"user_info_code"}
--- currentBusinessCreateTime=order_create_time
--- dimDb=postgres
--- dimDbType=postgres
--- dimTable=dwd.t_dim_user
--- dimTableColumnsAndType={"create_time":"timestamp","user_info_code":"varchar(128)","user_name":"varchar(128)","user_age":"int","user_address":"varchar(128)"}
--- updateTable=ods_t_order__extracted
--- transformerType=object
--- target=do_nothing
-
--- step=3
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_product
--- target=temp
--- tableName=postgres_dwd_t_dim_product__matched
--- writeMode=append
-select
- "product_id", "mid", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_product";
-
--- step=4
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_dim_user
--- target=temp
--- tableName=postgres_dwd_t_dim_user__matched
--- writeMode=append
-select
- "dim_user_id", "user_info_code", "start_time", "end_time"
-from "postgres"."dwd"."t_dim_user";
-
--- step=5
--- source=temp
--- tableName=ods_t_order__extracted
--- target=temp
--- tableName=ods_t_order__joined
--- writeMode=append
-select
- `ods_t_order__extracted`.*,
- case when `postgres_dwd_t_dim_product__matched`.`product_id` is null then '-1'
- else `postgres_dwd_t_dim_product__matched`.`product_id` end as `product_id`,
- case when `postgres_dwd_t_dim_user__matched`.`dim_user_id` is null then '-1'
- else `postgres_dwd_t_dim_user__matched`.`dim_user_id` end as `user_id`
-from `ods_t_order__extracted`
-left join `postgres_dwd_t_dim_product__matched`
- on `ods_t_order__extracted`.`product_code` = `postgres_dwd_t_dim_product__matched`.`mid`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_product__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_product__matched`.`end_time`
- or `postgres_dwd_t_dim_product__matched`.`end_time` is null)
-
-left join `postgres_dwd_t_dim_user__matched`
- on `ods_t_order__extracted`.`user_code` = `postgres_dwd_t_dim_user__matched`.`user_info_code`
- and `ods_t_order__extracted`.`order_create_time` >= `postgres_dwd_t_dim_user__matched`.`start_time`
- and (`ods_t_order__extracted`.`order_create_time` < `postgres_dwd_t_dim_user__matched`.`end_time`
- or `postgres_dwd_t_dim_user__matched`.`end_time` is null);
-
--- step=6
--- source=temp
--- tableName=ods_t_order__joined
--- options
--- column.order_sn.qualityCheckRules=duplicated check, null check
--- column.product_id.qualityCheckRules=mismatch dim check
--- idColumn=order_sn
--- column.user_id.qualityCheckRules=mismatch dim check
--- target=temp
--- tableName=ods_t_order__target_selected
--- writeMode=overwrite
-select
- `order_sn`,
- `product_id`,
- `user_id`,
- `product_count`,
- `price`,
- `discount`,
- `order_status`,
- `order_create_time`,
- `order_update_time`,
- `actual`
-from `ods_t_order__joined`;
-
--- step=7
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcLoadTransformer
--- methodName=transform
--- businessCreateTime=order_create_time
--- businessUpdateTime=order_update_time
--- currentDb=postgres
--- currentDbType=postgres
--- currentTable=dwd.t_fact_order
--- currentTableColumnsAndType={"order_sn":"varchar(128)","product_id":"varchar(128)","user_id":"varchar(128)","product_count":"int","price":"decimal(10,4)","discount":"decimal(10,4)","order_status":"varchar(128)","order_create_time":"timestamp","order_update_time":"timestamp","actual":"decimal(10,4)"}
--- primaryFields=order_sn
--- slowChanging=false
--- updateTable=ods_t_order__target_selected
--- updateType=incremental
--- transformerType=object
--- target=do_nothing
-
diff --git a/spark/src/main/resources/tasks/ods__ods.t_order.sql b/spark/src/main/resources/tasks/ods__ods.t_order.sql
deleted file mode 100644
index 35c4aab..0000000
--- a/spark/src/main/resources/tasks/ods__ods.t_order.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- workflow=ods__ods.t_order
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.order
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_order
--- writeMode=append
-SELECT "order_sn" AS "order_sn",
- "product_code" AS "product_code",
- "product_name" AS "product_name",
- "product_version" AS "product_version",
- "product_status" AS "product_status",
- "user_code" AS "user_code",
- "user_name" AS "user_name",
- "user_age" AS "user_age",
- "user_address" AS "user_address",
- "product_count" AS "product_count",
- "price" AS "price",
- "discount" AS "discount",
- "order_status" AS "order_status",
- "order_create_time" AS "order_create_time",
- "order_update_time" AS "order_update_time",
- '${JOB_ID}' AS "job_id"
-FROM "postgres"."sales"."order"
-WHERE "order_update_time" >= '${DATA_RANGE_START}' AND "order_update_time" < '${DATA_RANGE_END}'
-AND "order_sn" = 'AAA';
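With loadType=incremental, logDrivenType=timewindow and period=1440, each run covers a one-day window, and `${DATA_RANGE_START}`/`${DATA_RANGE_END}` are derived from where the previous successful job log left off. A toy sketch of that bookkeeping (illustrative only, not the framework's scheduler):

import java.time.LocalDateTime

final case class TimeWindow(start: LocalDateTime, end: LocalDateTime)

object TimeWindowSketch {
  // each run advances by `periodMinutes` from the last successful end
  def nextWindow(lastSuccessEnd: LocalDateTime, periodMinutes: Long): TimeWindow =
    TimeWindow(lastSuccessEnd, lastSuccessEnd.plusMinutes(periodMinutes))

  def main(args: Array[String]): Unit = {
    val w = nextWindow(LocalDateTime.parse("2023-01-01T00:00"), 1440L)
    println(s"DATA_RANGE_START=${w.start}, DATA_RANGE_END=${w.end}")
    // DATA_RANGE_START=2023-01-01T00:00, DATA_RANGE_END=2023-01-02T00:00
  }
}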
diff --git a/spark/src/main/resources/tasks/ods__ods.t_product.sql b/spark/src/main/resources/tasks/ods__ods.t_product.sql
deleted file mode 100644
index 4bd38ce..0000000
--- a/spark/src/main/resources/tasks/ods__ods.t_product.sql
+++ /dev/null
@@ -1,22 +0,0 @@
--- workflow=ods__ods.t_product
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.product
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_product
--- writeMode=append
-SELECT "mid" AS "product_code",
- "name" AS "product_name",
- "version" AS "product_version",
- "status" AS "product_status",
- "create_time" AS "create_time",
- "update_time" AS "update_time",
- '${JOB_ID}' AS "job_id"
-FROM "postgres"."sales"."product"
-WHERE "update_time" >= '${DATA_RANGE_START}' AND "update_time" < '${DATA_RANGE_END}';
diff --git a/spark/src/main/resources/tasks/ods__ods.t_user.sql b/spark/src/main/resources/tasks/ods__ods.t_user.sql
deleted file mode 100644
index 6de65e5..0000000
--- a/spark/src/main/resources/tasks/ods__ods.t_user.sql
+++ /dev/null
@@ -1,22 +0,0 @@
--- workflow=ods__ods.t_user
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.user
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_user
--- writeMode=append
-SELECT "user_code" AS "user_code",
- "user_name" AS "user_name",
- "user_age" AS "user_age",
- "user_address" AS "user_address",
- "create_time" AS "create_time",
- "update_time" AS "update_time",
- '${JOB_ID}' AS "job_id"
-FROM "postgres"."sales"."user"
-WHERE "update_time" >= '${DATA_RANGE_START}' AND "update_time" < '${DATA_RANGE_END}';
diff --git a/spark/src/main/resources/tasks/order_report_actual.sql b/spark/src/main/resources/tasks/order_report_actual.sql
deleted file mode 100644
index 92b07f2..0000000
--- a/spark/src/main/resources/tasks/order_report_actual.sql
+++ /dev/null
@@ -1,29 +0,0 @@
--- workflow=order_report_actual
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_fact_order
--- target=postgres
--- dbName=postgres
--- tableName=report.t_fact_order_report_actual
--- writeMode=overwrite
-select
- fact.order_sn order_sn,
- dim.product_id product_id,
- dim.mid product_code,
- dim.name product_name,
- dim.version product_version,
- dim.status product_status,
- fact.price price,
- fact.discount discount,
- fact.order_status order_status,
- fact.order_create_time order_create_time,
- fact.order_update_time order_update_time,
- fact.actual actual
-from dwd.t_fact_order fact
- inner join dwd.t_dim_product dim
- on fact.product_id = dim.product_id;
\ No newline at end of file
diff --git a/spark/src/main/resources/tasks/order_report_latest.sql b/spark/src/main/resources/tasks/order_report_latest.sql
deleted file mode 100644
index ea83749..0000000
--- a/spark/src/main/resources/tasks/order_report_latest.sql
+++ /dev/null
@@ -1,29 +0,0 @@
--- workflow=order_report_latest
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_fact_order
--- target=postgres
--- dbName=postgres
--- tableName=report.t_fact_order_report_latest
--- writeMode=overwrite
-select
- fact.order_sn order_sn,
- dim2.product_id product_id,
- dim2.mid product_code,
- dim2.name product_name,
- dim2.version product_version,
- dim2.status product_status,
- fact.price price,
- fact.discount discount,
- fact.order_status order_status,
- fact.order_create_time order_create_time,
- fact.order_update_time order_update_time,
- fact.actual actual
-from dwd.t_fact_order fact
- inner join dwd.t_dim_product dim on fact.product_id = dim.product_id
- inner join (select * from dwd.t_dim_product dim_latest where is_latest='1') dim2 on dim.mid = dim2.mid;
\ No newline at end of file
diff --git a/spark/src/main/resources/tasks/report_order.sql b/spark/src/main/resources/tasks/report_order.sql
deleted file mode 100644
index 2a15dba..0000000
--- a/spark/src/main/resources/tasks/report_order.sql
+++ /dev/null
@@ -1,30 +0,0 @@
--- workflow=report_order
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=dwd.t_fact_order
--- target=postgres
--- dbName=postgres
--- tableName=report.t_fact_order_report
--- writeMode=overwrite
-select
- fact.order_id order_id,
- fact.order_sn order_sn,
- dim.id product_id,
- dim.mid product_code,
- dim.name product_name,
- dim.version product_version,
- dim.status product_status,
- fact.price price,
- fact.discount discount,
- fact.order_status order_status,
- fact.order_create_time order_create_time,
- fact.order_update_time order_update_time,
- fact.actual actual
-from dwd.t_fact_order fact
-inner join dwd.t_dim_product dim
- on fact.product_id = dim.id;
\ No newline at end of file
diff --git a/spark/src/main/resources/tasks/sales.order__ods.t_order.sql b/spark/src/main/resources/tasks/sales.order__ods.t_order.sql
deleted file mode 100644
index e92abe6..0000000
--- a/spark/src/main/resources/tasks/sales.order__ods.t_order.sql
+++ /dev/null
@@ -1,31 +0,0 @@
--- workflow=sales.order__ods.t_order
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.order
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_order
--- writeMode=append
-SELECT "order_sn" AS "order_sn",
- "product_code" AS "product_code",
- "product_name" AS "product_name",
- "product_version" AS "product_version",
- "product_status" AS "product_status",
- "user_code" AS "user_code",
- "user_name" AS "user_name",
- "user_age" AS "user_age",
- "user_address" AS "user_address",
- "product_count" AS "product_count",
- "price" AS "price",
- "discount" AS "discount",
- "order_status" AS "order_status",
- "order_create_time" AS "order_create_time",
- "order_update_time" AS "order_update_time",
- '${JOB_ID}' AS "job_id"
-FROM "postgres"."sales"."order"
-WHERE "order_update_time" >= '${DATA_RANGE_START}' AND "order_update_time" < '${DATA_RANGE_END}';
diff --git a/spark/src/main/resources/tasks/sales.product__ods.t_product.sql b/spark/src/main/resources/tasks/sales.product__ods.t_product.sql
deleted file mode 100644
index 85af929..0000000
--- a/spark/src/main/resources/tasks/sales.product__ods.t_product.sql
+++ /dev/null
@@ -1,22 +0,0 @@
--- workflow=sales.product__ods.t_product
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.product
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_product
--- writeMode=append
-SELECT "mid" AS "product_code",
- "name" AS "product_name",
- "version" AS "product_version",
- "status" AS "product_status",
- "create_time" AS "create_time",
- "update_time" AS "update_time",
- '${JOB_ID}' AS "job_id"
-FROM "postgres"."sales"."product"
-WHERE "update_time" >= '${DATA_RANGE_START}' AND "update_time" < '${DATA_RANGE_END}';
diff --git a/spark/src/main/resources/tasks/sales.user__ods.t_user.sql b/spark/src/main/resources/tasks/sales.user__ods.t_user.sql
deleted file mode 100644
index 39747f1..0000000
--- a/spark/src/main/resources/tasks/sales.user__ods.t_user.sql
+++ /dev/null
@@ -1,22 +0,0 @@
--- workflow=sales.user__ods.t_user
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=postgres
--- dbName=postgres
--- tableName=sales.user
--- target=postgres
--- dbName=postgres
--- tableName=ods.t_user
--- writeMode=append
-SELECT "user_code" AS "user_code",
- "user_name" AS "user_name",
- "user_age" AS "user_age",
- "user_address" AS "user_address",
- "create_time" AS "create_time",
- "update_time" AS "update_time",
- '${JOB_ID}' AS "job_id"
-FROM "postgres"."sales"."user"
-WHERE "update_time" >= '${DATA_RANGE_START}' AND "update_time" < '${DATA_RANGE_END}';
diff --git a/spark/src/main/resources/tasks/sink_to_kafka_test.sql b/spark/src/main/resources/tasks/sink_to_kafka_test.sql
deleted file mode 100644
index 8e07a70..0000000
--- a/spark/src/main/resources/tasks/sink_to_kafka_test.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- workflow=sink_to_kafka_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=temp
--- target=batch_kafka
--- topics=sink-to-kafka-6
-with data as (select 'user_id_1' as `user_id`,
- 'user_name_1' as `user_name`
- union all
- select 'user_id_2' as `user_id`,
- 'user_name_2' as `user_name`)
-select *
-from data;
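Behind the batch_kafka target, the step's result set is written to the configured topic. A rough plain-Spark equivalent, assuming the spark-sql-kafka connector is on the classpath (broker address is illustrative):

import org.apache.spark.sql.SparkSession

object KafkaSinkSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("kafka-sink-sketch").getOrCreate()
    import spark.implicits._

    Seq(("user_id_1", "user_name_1"), ("user_id_2", "user_name_2"))
      .toDF("user_id", "user_name")
      // the Kafka sink expects string/binary key and value columns
      .selectExpr("user_id as key", "to_json(struct(*)) as value")
      .write
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")
      .option("topic", "sink-to-kafka-6")
      .save()
  }
}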
diff --git a/spark/src/main/resources/tasks/streaming_kafka_test.sql b/spark/src/main/resources/tasks/streaming_kafka_test.sql
deleted file mode 100644
index 58186c0..0000000
--- a/spark/src/main/resources/tasks/streaming_kafka_test.sql
+++ /dev/null
@@ -1,57 +0,0 @@
--- workflow=streaming_kafka_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=temp
--- target=temp
--- tableName=public_temp
-with data as (select 'user_id_1' as user_id,
- 'user_name_1' as user_name
- union all
- select 'user_id_2' as user_id,
- 'user_name_2' as user_name)
-select *
-from data;
-
--- step=2
--- source=streaming_kafka
--- topics=streaming_kafka_test
--- groupId=streaming_kafka_test_group_1
--- interval=10
--- schemaDDL=`day` STRING,`dataType` STRING,`info_1` STRING,`info_2` STRING,`list` ARRAY<STRING>
--- target=temp
--- tableName=kafka_temp
-
--- step=3
--- source=temp
--- target=console
-with public_data as (select user_id,
- user_name
- from public_temp),
- kafka_data as (select `day`,
- `dataType` as `data_type`,
- `info_1`,
- `info_2`,
- `list`
- from kafka_temp),
- explode_data as (select `day`,
- `data_type`,
- `info_1`,
- `info_2`,
- explode(`list`) as `json`
- from kafka_data),
- lateral_view_data as (select `day`,
- `data_type`,
- `info_1`,
- `info_2`,
- j.*
- from explode_data
- lateral view
- json_tuple(json, 'id', 'time', 'user_id') j as `id`, `time`, `user_id`)
-select *
-from lateral_view_data
- left semi
- join public_data
- on lateral_view_data.user_id = public_data.user_id;
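Step 2 consumes a Kafka topic as a stream and parses messages against the declared schemaDDL. A plain-Spark approximation (again assuming the spark-sql-kafka connector; broker address is illustrative):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, from_json}
import org.apache.spark.sql.types.{ArrayType, StringType, StructType}

object StreamingKafkaSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("streaming-kafka-sketch").getOrCreate()

    // mirrors the step=2 schemaDDL, with ARRAY<STRING> for `list`
    val schema = new StructType()
      .add("day", StringType)
      .add("dataType", StringType)
      .add("info_1", StringType)
      .add("info_2", StringType)
      .add("list", ArrayType(StringType))

    spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")
      .option("subscribe", "streaming_kafka_test")
      .load()
      .select(from_json(col("value").cast("string"), schema).alias("data"))
      .select("data.*")
      .writeStream
      .format("console")
      .option("truncate", "false")
      .start()
      .awaitTermination()
  }
}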
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/Entrypoint.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/Entrypoint.scala
deleted file mode 100644
index 231b49c..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/Entrypoint.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.github.sharpdata.sharpetl.spark
-
-import com.github.sharpdata.sharpetl.spark.cli.Command
-import picocli.CommandLine
-
-
-object Entrypoint {
- val errorHandler: CommandLine.IExecutionExceptionHandler =
- new CommandLine.IExecutionExceptionHandler() {
- def handleExecutionException(ex: Exception, commandLine: CommandLine, parseResult: CommandLine.ParseResult): Int = {
- ex.printStackTrace()
- commandLine.getCommandSpec.exitCodeOnExecutionException
- }
- }
-
- def main(args: Array[String]): Unit = {
- val code = new CommandLine(new Command()).setExecutionExceptionHandler(errorHandler).execute(
- args: _*
- )
- if (!succeed(code)) {
- System.exit(code)
- }
- }
-
- private def succeed(code: Int) = {
- code == 0
- }
-}
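Entrypoint's one notable choice is the custom IExecutionExceptionHandler, which prints the stack trace and maps any execution failure to the command's configured exit code. A stripped-down sketch of the same picocli wiring (command name is made up):

import picocli.CommandLine

object EntrypointSketch {
  @CommandLine.Command(name = "noop")
  class NoopCommand extends Runnable {
    override def run(): Unit = println("ok")
  }

  def main(args: Array[String]): Unit = {
    // run the command, exit non-zero only on failure
    val exitCode = new CommandLine(new NoopCommand()).execute(args: _*)
    if (exitCode != 0) System.exit(exitCode)
  }
}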
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/cli/Command.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/cli/Command.scala
deleted file mode 100644
index 2e1cff2..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/cli/Command.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.cli
-
-import com.github.sharpdata.sharpetl.modeling.cli.{GenerateDwdStepCommand, GenerateSqlFiles}
-import com.github.sharpdata.sharpetl.spark.utils.JavaVersionChecker
-import com.github.sharpdata.sharpetl.core.api.WfEvalResult.throwFirstException
-import com.github.sharpdata.sharpetl.core.api.{LogDrivenInterpreter, WfEvalResult}
-import com.github.sharpdata.sharpetl.core.cli.{BatchJobCommand, EncryptionCommand, SingleJobCommand}
-import com.github.sharpdata.sharpetl.core.notification.NotificationUtil
-import com.github.sharpdata.sharpetl.core.quality.QualityCheckRuleConfig.readQualityCheckRules
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor.jobLogAccessor
-import com.github.sharpdata.sharpetl.core.util.FlywayUtil.migrate
-import com.github.sharpdata.sharpetl.core.util._
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.getSparkInterpreter
-import picocli.CommandLine
-
-
-@CommandLine.Command(name = "single-job")
-class SingleSparkJobCommand extends SingleJobCommand {
- override def run(): Unit = {
- loggingJobParameters()
- ETLConfig.extraParam = extraParams
- ETLConfig.setPropertyPath(propertyPath, env)
- val etlDatabaseType = JDBCUtil.dbType
- val interpreter = getSparkInterpreter(local, wfName, releaseResource, etlDatabaseType, readQualityCheckRules())
- migrate()
- JavaVersionChecker.checkJavaVersion()
- try {
- val wfInterpretingResult: WfEvalResult = LogDrivenInterpreter(
- WorkflowReader.readWorkflow(wfName),
- interpreter,
- jobLogAccessor = jobLogAccessor,
- command = this
- ).eval()
- new NotificationUtil(jobLogAccessor).notify(Seq(wfInterpretingResult))
- throwFirstException(Seq(wfInterpretingResult))
- } finally {
- interpreter.close()
- }
- }
-}
-
-@CommandLine.Command(name = "batch-job")
-class BatchSparkJobCommand extends BatchJobCommand {
- override def run(): Unit = {
- loggingJobParameters()
- ETLConfig.extraParam = extraParams
- ETLConfig.setPropertyPath(propertyPath, env)
- JavaVersionChecker.checkJavaVersion()
- migrate()
- val etlDatabaseType = JDBCUtil.dbType
- // val logDrivenInterpreters = if (excelOptions != null) getJobsFromExcel(etlDatabaseType) else getInterpretersFromSqlFile(etlDatabaseType)
- val logDrivenInterpreters = getInterpretersFromSqlFile(etlDatabaseType)
- val batchJobResult: Seq[WfEvalResult] =
- try {
- logDrivenInterpreters.map(_.eval())
- } finally {
- logDrivenInterpreters.headOption.foreach(_.workflowInterpreter.close())
- }
- val failedCount = batchJobResult.map(_.jobLogs.count { it => it.isFailure() }).sum
- val skippedCount = batchJobResult.map(_.jobLogs.count { it => it.isSkipped() }).sum
- val successCount = batchJobResult.map(_.jobLogs.count { it => it.isSuccess() }).sum
-
- ETLLogger.info(
- s"""
- |Total jobs: ${logDrivenInterpreters.size}, success: $successCount, failed: $failedCount, skipped: $skippedCount
- |Details:
- |${batchJobResult.map(_.toString).mkString("\n\n")}
- |""".stripMargin)
- new NotificationUtil(jobLogAccessor).notify(batchJobResult)
- if (failedCount > 0) {
- throwFirstException(batchJobResult)
- }
- }
-
- def getInterpretersFromSqlFile(etlDatabaseType: String): Seq[LogDrivenInterpreter] = {
- sqlFileOptions.wfNames
- .map(wfName => {
- val interpreter = getSparkInterpreter(local, wfName, releaseResource, etlDatabaseType, readQualityCheckRules())
- JavaVersionChecker.checkJavaVersion()
- LogDrivenInterpreter(
- WorkflowReader.readWorkflow(wfName),
- interpreter,
- jobLogAccessor = jobLogAccessor,
- command = this
- )
- })
- }
-}
-
-@CommandLine.Command(
- subcommands = Array(
- classOf[SingleSparkJobCommand],
- classOf[BatchSparkJobCommand],
- classOf[GenerateSqlFiles],
- classOf[EncryptionCommand],
- classOf[GenerateDwdStepCommand]
- )
-)
-class Command extends Runnable {
-
- override def run(): Unit = ()
-
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/CSVDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/CSVDataSource.scala
deleted file mode 100644
index ec34702..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/CSVDataSource.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.datasource.config.CSVDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.HDFSUtil.mv
-import com.github.sharpdata.sharpetl.core.util.{HDFSUtil, StringUtil}
-import com.github.sharpdata.sharpetl.core.annotation._
-import org.apache.spark.sql.functions.lit
-import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
-
-import java.sql.Timestamp
-import java.time.LocalDate
-import java.time.format.DateTimeFormatter
-import scala.util.matching.Regex
-
-@source(types = Array("csv"))
-@sink(types = Array("csv"))
-class CSVDataSource extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- loadFromHdfs(executionContext, step.getSourceConfig)
- }
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- save(df, step.getTargetConfig)
- }
-
- def loadFromHdfs(spark: SparkSession,
- sourceConfig: CSVDataSourceConfig): DataFrame = {
- val df = spark
- .read
- .option("inferSchema", sourceConfig.getInferSchema)
- .option("encoding", sourceConfig.getEncoding)
- .option("sep", sourceConfig.getSep)
- .option("header", sourceConfig.getHeader)
- .option("quote", sourceConfig.getQuote)
- .option("escape", sourceConfig.getEscape)
- .option("multiLine", sourceConfig.getMultiLine)
- .option("ignoreTrailingWhiteSpace", sourceConfig.getIgnoreTrailingWhiteSpace)
- .csv(sourceConfig.filePath)
- .selectExpr(sourceConfig.getSelectExpr.split(","): _*)
- .withColumn("file_name", lit(StringUtil.getFileNameFromPath(sourceConfig.filePath)))
-
- if (!StringUtil.isNullOrEmpty(sourceConfig.parseTimeFromFileNameRegex)) {
- val filename = StringUtil.getFileNameFromPath(sourceConfig.filePath)
- val parseDate = sourceConfig.parseTimeColumnName
- val maybeMatch = new Regex(sourceConfig.parseTimeFromFileNameRegex, parseDate).findFirstMatchIn(filename)
- val parseDateStr = maybeMatch.get.group(parseDate)
-
- val dateFormat = DateTimeFormatter.ofPattern(sourceConfig.parseTimeFormatPattern)
- df.withColumn(parseDate, lit(Timestamp.valueOf(LocalDate.parse(parseDateStr, dateFormat).atStartOfDay())))
- } else {
- df
- }
-
- }
-
- def save(df: DataFrame,
- targetConfig: CSVDataSourceConfig): Unit = {
- val tempTargetPath = StringUtil.uuid
- val targetPath = targetConfig.getFilePath
- df
- .repartition(1)
- .write
- .option("encoding", targetConfig.getEncoding)
- .option("sep", targetConfig.getSep)
- .option("header", targetConfig.getHeader)
- .option("mapreduce.fileoutputcommitter.algorithm.version", "1")
- .mode(SaveMode.Overwrite)
- .csv(tempTargetPath)
- mv(
- HDFSUtil.listFileUrl(tempTargetPath, "part-00000.*").head,
- targetPath,
- overWrite = true
- )
- }
-}
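The parseTimeFromFileNameRegex option works because the parse-time column name doubles as a regex group name, exactly as in loadFromHdfs above. A standalone sketch of that trick (file name and pattern are made up):

import scala.util.matching.Regex

object FileNameTimeSketch {
  def main(args: Array[String]): Unit = {
    val fileName = "orders_20230101.csv"                   // hypothetical file
    val columnName = "loadDate"                            // parseTimeColumnName
    val pattern = new Regex("orders_(\\d{8})", columnName) // parseTimeFromFileNameRegex
    val parsed = pattern.findFirstMatchIn(fileName).map(_.group(columnName))
    println(parsed) // Some(20230101)
  }
}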
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/CompressTarDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/CompressTarDataSource.scala
deleted file mode 100644
index 29ec848..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/CompressTarDataSource.scala
+++ /dev/null
@@ -1,111 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Source
-import java.io.{File, FileOutputStream}
-import com.github.sharpdata.sharpetl.core.datasource.config.CompressTarConfig
-import com.github.sharpdata.sharpetl.core.exception.Exception.StepFailedException
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, HDFSUtil}
-import com.github.sharpdata.sharpetl.core.annotation._
-import org.apache.commons.compress.archivers.tar.{TarArchiveEntry, TarArchiveInputStream}
-import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream
-import org.apache.hadoop.fs.Path
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-import scala.util.matching.Regex
-
-
-@source(types = Array("compresstar"))
-class CompressTarDataSource extends Source[DataFrame, SparkSession] {
-
- val replaceCharOld = "/"
- val replaceCharNew = "_"
- val bufferSize = 4096
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- loadFromCompressTar(executionContext, step.getSourceConfig)
- }
-
- def loadFromCompressTar(spark: SparkSession, dataSourceConfig: CompressTarConfig): DataFrame = {
- val filesPath = dataSourceConfig.getTarPath
- val tarDir = filesPath.substring(0, filesPath.lastIndexOf(replaceCharOld))
- val tarPattern = filesPath.substring(filesPath.lastIndexOf(replaceCharOld) + 1, filesPath.length)
-
- HDFSUtil.listFileUrl(tarDir, tarPattern)
- .map(it => it.substring(it.lastIndexOf(replaceCharOld) + 1, it.length))
- .map(it => s"${tarDir}/${it}")
- .foreach(it => {
- extractFile(dataSourceConfig, it)
- })
- spark.emptyDataFrame
- }
-
- private def filterByFilePattern(sourceFile: String, filePattern: Regex): Boolean = {
- filePattern findFirstMatchIn sourceFile match {
- case Some(_) => true
- case _ => false
- }
- }
-
-
- private def getTargetFileName(targetPath: String, fileName: String): String = {
- val targetFile = fileName.replace(replaceCharOld, replaceCharNew)
- targetPath + targetFile
- }
-
- private def getBakFileName(bakPath: String, fileName: String): String = {
- val bakFileName = fileName.substring(fileName.lastIndexOf("/") + 1)
- bakPath + bakFileName
- }
-
- private def getTmpFileName(targetPath: String, fileName: String): String = {
- val targetFile = fileName.replace(replaceCharOld, replaceCharNew)
- targetPath + targetFile
- }
-
-
- def isHDFSPath(path: String): Boolean = {
- path.indexOf("hdfs:") != -1
- }
-
- private def isPassEmptyFile(config: CompressTarConfig, entry: TarArchiveEntry): Boolean = {
- config.isPassEmptyFile == "true" && entry.getSize == 0L
- }
-
- private def extractFile(config: CompressTarConfig, file: String): Unit = {
- val filePattern = new Regex(config.fileNamePattern)
- try {
- val fs = HDFSUtil.getFileSystem()
- val tar = new TarArchiveInputStream(new GzipCompressorInputStream(HDFSUtil.readFile(file, fs)))
-
- var entry: TarArchiveEntry = tar.getNextTarEntry
- while (entry != null) {
- if (!entry.isDirectory && filterByFilePattern(entry.getName, filePattern) && !isPassEmptyFile(config, entry)) {
- ETLLogger.info(s"exact fileName:${entry.getName}")
-
- val targetFileName = getTargetFileName(config.getTargetPath, entry.getName)
- val tmpFileName = getTmpFileName(config.tmpPath, entry.getName)
- val output = new FileOutputStream(new File(tmpFileName), false)
-
- val buffer = new Array[Byte](bufferSize)
- var len = tar.read(buffer)
- while (len != -1) {
- output.write(buffer, 0, len)
- len = tar.read(buffer)
- }
- output.flush()
- output.close()
- fs.moveFromLocalFile(new Path(tmpFileName), new Path(targetFileName))
- }
- entry = tar.getNextTarEntry
- }
- HDFSUtil.mv(fs, file, getBakFileName(config.bakPath, file), true)
- tar.close()
- HDFSUtil.closeFileSystem(fs)
- } catch {
- case e: Exception => throw StepFailedException(e, "Exception occurred while extracting tar archive!")
-
- }
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ConsoleDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ConsoleDataSource.scala
deleted file mode 100644
index 4090c5e..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ConsoleDataSource.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Sink
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.annotation._
-import org.apache.spark.sql.DataFrame
-
-@sink(types = Array("console"))
-class ConsoleDataSource extends Sink[DataFrame] {
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- df.printSchema()
- df.show(numRows = 100 * 100, truncate = false)
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/DeltaLakeDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/DeltaLakeDataSource.scala
deleted file mode 100644
index cee12d4..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/DeltaLakeDataSource.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.util.Constants.WriteMode
-import com.github.sharpdata.sharpetl.core.util.Constants.WriteMode.MERGE_WRITE
-import com.github.sharpdata.sharpetl.datasource.kafka.DFConversations._
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.core.util.StringUtil.uuid
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-@source(types = Array("delta_lake"))
-@sink(types = Array("delta_lake"))
-class DeltaLakeDataSource extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- executionContext.sql(step.getSql)
- }
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- save(df, step)
- }
-
- def save(df: DataFrame, step: WorkflowStep): Unit = {
- val writeMode: String = step.writeMode
- val config = step.target.asInstanceOf[DBDataSourceConfig]
- val dbName = config.dbName
- val resultTempTable = s"$uuid"
- val tableName = config.tableName
- if (!df.isEmpty) {
- try {
- if (writeMode == WriteMode.EXECUTE) {
- sparkSession.sql(step.sql)
- } else {
- df.createTempView(resultTempTable)
- ETLLogger.info(s"Saved data to temp table $resultTempTable")
-
- val saveMode = writeMode match {
- case WriteMode.OVER_WRITE | MERGE_WRITE =>
- "overwrite"
- case WriteMode.APPEND =>
- "into"
- }
- val insertSql =
- s"""
- |insert $saveMode table $dbName.$tableName
- |select * from $resultTempTable
- |""".stripMargin
- ETLLogger.info(s"""[$tableName] Insert Sql: $insertSql""")
- sparkSession.sql(insertSql)
- }
- } finally {
- sparkSession.sql(s"drop table if exists $resultTempTable")
- }
- } else {
- ETLLogger.error(s"Source is empty, nothing need to be written into target table: $tableName")
- }
- }
-
- /*private def write(df: DataFrame, step: WorkflowStep, targetPath: String) = {
- df
- .write
- .format("delta")
- .mode(step.getWriteMode)
- .save(targetPath)
- }
-
- private def writeByPartition(df: DataFrame, step: WorkflowStep, targetPath: String, partitionColumn: String) = {
- df
- .write
- .format("delta")
- .mode(step.getWriteMode)
- .partitionBy(partitionColumn)
- .save(targetPath)
- }
-
- def load(spark: SparkSession,
- step: WorkflowStep): DataFrame = {
- spark
- .read
- .format("delta")
- .load(s"$deltaLakeBasePath/${step.source.asInstanceOf[DBDataSourceConfig].tableName}")
- }*/
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/DoNothingDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/DoNothingDataSource.scala
deleted file mode 100644
index be32524..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/DoNothingDataSource.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.annotation._
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-@source(types = Array("do_nothing"))
-@sink(types = Array("do_nothing"))
-class DoNothingDataSource extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- import executionContext.implicits._
- executionContext.sparkContext.parallelize(Seq("")).toDF("line")
- }
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = ()
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ExcelDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ExcelDataSource.scala
deleted file mode 100644
index 93abb43..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ExcelDataSource.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Source
-import com.github.sharpdata.sharpetl.core.datasource.config.ExcelDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.StringUtil
-import com.github.sharpdata.sharpetl.core.annotation._
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-@source(types = Array("excel"))
-class ExcelDataSource extends Source[DataFrame, SparkSession] {
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- // scalastyle:off
- val sourceConfig = step.getSourceConfig[ExcelDataSourceConfig]
- import com.crealytics.spark.excel._
- executionContext
- .read
- .options(sourceConfig.getOptions())
- .excel(
- header = sourceConfig.getHeader.toBoolean,
- treatEmptyValuesAsNulls = sourceConfig.getTreatEmptyValuesAsNulls.toBoolean,
- inferSchema = sourceConfig.getInferSchema.toBoolean,
- addColorColumns = sourceConfig.getAddColorColumns.toBoolean,
- dataAddress = sourceConfig.getDataAddress,
- timestampFormat = sourceConfig.getTimestampFormat,
- maxRowsInMemory = sourceConfig.getMaxRowsInMemory match {
- case s: String => s.toInt
- case _ => null
- },
- excerptSize = sourceConfig.getExcerptSize,
- workbookPassword = sourceConfig.getWorkbookPassword
- )
- .load(sourceConfig.getFilePath)
- .selectExpr(
- s"'${StringUtil.getFileNameFromPath(sourceConfig.getFilePath)}' as file_name",
- "*"
- )
- // scalastyle:on
- }
-
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/FtpDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/FtpDataSource.scala
deleted file mode 100644
index 5a51c52..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/FtpDataSource.scala
+++ /dev/null
@@ -1,246 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Sink
-import com.github.sharpdata.sharpetl.core.datasource.config.TextFileDataSourceConfig
-import com.github.sharpdata.sharpetl.core.datasource.connection.FtpConnection
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.Encoding
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, HDFSUtil, IOUtil, StringUtil}
-import com.github.sharpdata.sharpetl.core.annotation._
-import org.apache.commons.net.ftp._
-import org.apache.hadoop.fs.Path
-import org.apache.hadoop.io.compress.CompressionCodecFactory
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-import java.io._
-
-@sink(types = Array("ftp"))
-class FtpDataSource extends Sink[DataFrame] {
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = save(step)
-
- def closeFTPClient(ftpClient: FTPClient): Unit = {
- if (ftpClient != null) {
- try {
- ftpClient.logout()
- } catch {
- case e: Exception =>
- ETLLogger.error("close FTP failed.", e)
- } finally {
- if (ftpClient.isConnected) {
- try {
- ftpClient.disconnect()
- } catch {
- case e: IOException =>
- ETLLogger.error("close FTP failed.", e)
- }
- }
- }
- }
- }
-
- def getFTPClient(configPrefix: String): FTPClient = {
- val config = new FtpConnection(configPrefix)
- getFTPClient(config)
- }
-
- def getFTPClient(ftpConfig: FtpConnection): FTPClient = {
- getFTPClient(
- ftpConfig.host,
- ftpConfig.port,
- ftpConfig.user,
- ftpConfig.password
- )
- }
-
- def getFTPClient(host: String,
- port: Int,
- user: String,
- password: String): FTPClient = {
- val ftp: FTPClient = new FTPClient
- ftp.connect(host, port)
- ftp.login(user, password)
- ftp.setConnectTimeout(50000)
- ftp.setControlEncoding(Encoding.UTF8)
- ftp.setAutodetectUTF8(true)
- ftp.enterLocalPassiveMode()
- ftp.setFileType(FTP.BINARY_FILE_TYPE)
- if (!FTPReply.isPositiveCompletion(ftp.getReplyCode)) {
- ftp.disconnect()
- throw new RuntimeException("connect FTP failed.")
- } else {
- ETLLogger.info("FTP connect success.")
- }
- ftp
- }
-
- def listFileUrl(ftpConfig: FtpConnection, fileNamePattern: String): List[String] = {
- val ftp = getFTPClient(ftpConfig)
- val files: List[String] = listFile(ftp, ftpConfig.dir, fileNamePattern)
- .map(fileName => s"${ftpConfig.dir}/$fileName")
- closeFTPClient(ftp)
- files
- }
-
- def listFile(configPrefix: String, fileDir: String, fileNamePattern: String): List[String] = {
- val ftp = getFTPClient(configPrefix)
- val files: List[String] = listFile(ftp, fileDir, fileNamePattern)
- closeFTPClient(ftp)
- files
- }
-
- def listFile(ftp: FTPClient, fileDir: String, fileNamePattern: String): List[String] = {
- ftp
- .listFiles(
- fileDir,
- new FTPFileFilter {
- override def accept(file: FTPFile): Boolean =
- fileNamePattern.r.findFirstMatchIn(file.getName).isDefined
- }
- )
- .map(_.getName)
- .toList
- }
-
- def delete(configPrefix: String, path: String): Boolean = {
- val ftp = getFTPClient(configPrefix)
- try {
- val deleted = delete(ftp, path)
- ETLLogger.info(s"Delete file '$path' from ftp success.")
- deleted
- } catch {
- case e: Exception =>
- ETLLogger.error(s"Delete file '$path' from ftp failed.", e)
- throw e
- } finally {
- closeFTPClient(ftp)
- }
- }
-
- def delete(ftp: FTPClient, path: String): Boolean = {
- ftp.deleteFile(path)
- }
-
- def save(step: WorkflowStep): Unit = {
- val sourceConfig: TextFileDataSourceConfig = step.getSourceConfig[TextFileDataSourceConfig]
- val prefix = sourceConfig.getConfigPrefix
- val ftpConfig = new FtpConnection(prefix)
- val localPath = sourceConfig.getFilePath
- val targetConfig = step.getTargetConfig[TextFileDataSourceConfig]
- val remote = targetConfig.getFilePath
- val fs = HDFSUtil.getFileSystem()
- val local = fs.open(new Path(localPath))
- ftpTo(ftpConfig, local, remote)
- local.close()
- }
-
- def ftpTo(ftpConfig: FtpConnection, local: InputStream, remote: String): Unit = {
- val ftpClient = getFTPClient(ftpConfig)
- ftpTo(ftpClient, local, remote)
- closeFTPClient(ftpClient)
- }
-
- def mkdir(ftp: FTPClient, path: String): Boolean = {
- val parentPath = StringUtil.getParentPath(path)
- if (ftp.listDirectories(parentPath).isEmpty) {
- mkdir(ftp, parentPath)
- }
- if (ftp.listDirectories(path).isEmpty) {
- ftp.makeDirectory(path)
- } else {
- true
- }
- }
-
- def ftpTo(ftp: FTPClient, local: InputStream, remote: String): Unit = {
- ftp.deleteFile(remote)
- val fileDir = StringUtil.getParentPath(remote)
- mkdir(ftp, fileDir)
- val tempFilePath = StringUtil.concatFilePath(fileDir, StringUtil.uuid)
- // upload under a temp name, then rename, so a partially written file
- // never shows up at the final path
- val success = ftp.storeFile(tempFilePath, local) && ftp.rename(tempFilePath, remote)
- if (success) {
- ETLLogger.info(s"Uploaded file to '$remote' successfully.")
- } else {
- val errMsg = s"Failed to upload file to '$remote'."
- ETLLogger.error(errMsg)
- throw new RuntimeException(errMsg)
- }
- }
-
- def ftpFrom(ftpConfig: FtpConnection, remote: String, local: String): Unit = {
- val ftpClient = getFTPClient(ftpConfig)
- ftpFrom(ftpClient, remote, local)
- closeFTPClient(ftpClient)
- }
-
- def ftpFrom(ftp: FTPClient, remote: String, local: String): Unit = {
- val localFile = new File(local)
- val outputStream = try {
- new BufferedOutputStream(new FileOutputStream(localFile))
- } catch {
- case e: Exception =>
- ETLLogger.error("Create FileOutputStream failed.", e)
- throw e
- }
- val success = ftp.retrieveFile(
- new String(remote.getBytes(Encoding.UTF8), Encoding.ISO_8859_1),
- outputStream
- )
- if (success) {
- ETLLogger.info(s"Download file success, from FTP '$remote' to local '$local'.")
- } else {
- val errMsg = s"Download file failed, from FTP '$remote' to local '$local'."
- ETLLogger.error(errMsg)
- throw new RuntimeException(errMsg)
- }
- try {
- outputStream.close()
- } catch {
- case e: Exception =>
- ETLLogger.error("Close FileOutputStream failed.", e)
- }
- }
-
- def load(spark: SparkSession,
- step: WorkflowStep,
- job: JobLog): DataFrame = {
- val sourceConfig = step.getSourceConfig[TextFileDataSourceConfig]
- val prefix = sourceConfig.getConfigPrefix
- val ftpConfig = new FtpConnection(prefix)
-
- val remoteFilePath = sourceConfig.getFilePath
- val fileName = StringUtil.getFileNameFromPath(remoteFilePath)
- val localTempFilePath = StringUtil.concatFilePath(ftpConfig.getLocalTempDir, fileName)
- val hdfsTempFilePath = CompressionCodecFactory.removeSuffix(
- StringUtil.concatFilePath(ftpConfig.getHdfsTempDir, fileName),
- sourceConfig.codecExtension
- )
-
- IOUtil.mkdirs(ftpConfig.getLocalTempDir)
- ftpFrom(ftpConfig, remoteFilePath, localTempFilePath)
-
- HDFSUtil.mkdirs(ftpConfig.getHdfsTempDir)
- HDFSUtil.put(
- localTempFilePath,
- hdfsTempFilePath,
- sourceConfig.getCodecExtension,
- sourceConfig.getDecompress.toBoolean
- )
- IOUtil.delete(localTempFilePath)
-
- sourceConfig.setFilePath(hdfsTempFilePath)
- //TODO: [[com.github.sharpdata.sharpetl.common.model.LoadFileLog]]
- /*val loadFileLog = new LoadFileLog
- loadFileLog.setFilePath(remoteFilePath)
- loadFileLog.setTaskId(job.jobId)
- if (!source.getOnlyOneName.toBoolean) {
- TaskDataAccessor.newLoadFileLog(loadFileLog)
- }*/
- spark.emptyDataFrame
- }
-
-}
-
-object FtpDataSource extends FtpDataSource
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HdfsDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HdfsDataSource.scala
deleted file mode 100644
index 7fc90c7..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HdfsDataSource.scala
+++ /dev/null
@@ -1,119 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Sink
-import com.github.sharpdata.sharpetl.core.datasource.config._
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.{DataSourceType, TransformerType}
-import com.github.sharpdata.sharpetl.core.util.HDFSUtil._
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, HDFSUtil, StringUtil}
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.spark.utils.ReflectUtil
-import org.apache.spark.sql.{DataFrame, Row}
-
-@sink(types = Array("hdfs"))
-class HdfsDataSource extends Sink[DataFrame] {
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- save(df, step)
- }
-
- def filter(files: List[String], step: WorkflowStep): List[String] = {
- val sourceConfig = step.getSourceConfig[FileDataSourceConfig]
- val func = sourceConfig.getFileFilterFunc
-
- val result = ReflectUtil.execute(func, "filter", TransformerType.OBJECT_TYPE, files)
- result.asInstanceOf[List[String]]
- }
-
- def listFileUrl(step: WorkflowStep): List[String] = {
- val sourceConfig = step.getSourceConfig[FileDataSourceConfig]
- var dir = ETLConfig.getProperty(s"${sourceConfig.getConfigPrefix}.hdfs.dir")
- if (sourceConfig.getFileDir != null && sourceConfig.getFileDir != "") {
- dir = sourceConfig.getFileDir
- }
- val files = HDFSUtil.listFileUrl(
- dir,
- sourceConfig.getFileNamePattern
- )
- if (sourceConfig.getFileFilterFunc != null) {
- filter(files, step)
- } else {
- files
- }
- }
-
- def getTargetDir(step: WorkflowStep): String = {
- val targetConfig = step.getTargetConfig[TextFileDataSourceConfig]
- if (targetConfig.getFileDir != null && targetConfig.getFileDir != "") {
- targetConfig.getFileDir
- } else {
- ETLConfig.getProperty(s"${targetConfig.getConfigPrefix}.hdfs.dir")
- }
- }
-
- private def saveFromFile(step: WorkflowStep): Unit = {
- val targetDir = getTargetDir(step)
- mkdirs(targetDir)
- val src = step.getSourceConfig[TextFileDataSourceConfig].getFilePath
- val fileName = StringUtil.getFileNameFromPath(src)
- val target = StringUtil.concatFilePath(targetDir, fileName)
- mv(src, target, overWrite = true)
- }
-
- private def saveFromDB(df: DataFrame, step: WorkflowStep): Unit = {
- val targetConfig = step.getTargetConfig[TextFileDataSourceConfig]
- val targetPath = targetConfig.filePath
- val encoding = targetConfig.getEncoding
- val codecExtension = targetConfig.getCodecExtension
- val separator = targetConfig.getSeparator
- val mapFunction = if (separator != null && separator != "") {
- (row: Row) => row.toSeq.mkString(separator)
- } else {
- val fieldLengthArray = targetConfig
- .getFieldLengthConfig
- .split(",")
- .map(_.toInt)
- .zipWithIndex
- (row: Row) => {
- fieldLengthArray
- .map {
- case (fieldLength, index) =>
- // pad each field to its fixed width with spaces (0x20), then decode
- // back to a string so mkString concatenates text instead of
- // Array[Byte].toString references
- val result = Array.fill(fieldLength)(32.toByte)
- val fieldValue = row.get(index).toString
- fieldValue.getBytes(encoding).copyToArray(result)
- new String(result, encoding)
- }
- .mkString
- }
- }
- val codec = getCodecByExtension(codecExtension)
- val rdd = df
- .rdd
- .map(mapFunction)
- .repartition(1)
- val tempTargetPath = StringUtil.uuid
- if (codec.isDefined) {
- rdd.saveAsTextFile(tempTargetPath, codec.get.getClass)
- } else {
- rdd.saveAsTextFile(tempTargetPath)
- }
- mv(
- HDFSUtil.listFileUrl(tempTargetPath, "part-00000.*").head,
- targetPath,
- overWrite = true
- )
- delete(tempTargetPath)
- }
-
- def save(df: DataFrame, step: WorkflowStep): Unit = {
- step.source.dataSourceType match {
- case DataSourceType.SCP | DataSourceType.FTP | DataSourceType.HDFS =>
- saveFromFile(step)
- case _ =>
- saveFromDB(df, step)
- }
- }
-}
-
-object HdfsDataSource extends HdfsDataSource
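A worked example of the fixed-width branch in saveFromDB: with a fieldLengthConfig of "4,8" and UTF-8 encoding, each value is copied into a space-padded byte buffer of the configured length (values here are made up):

object FixedWidthSketch {
  def main(args: Array[String]): Unit = {
    val encoding = "UTF-8"
    val lengths = "4,8".split(",").map(_.toInt) // fieldLengthConfig
    val values = Seq("ab", "cdef")
    val line = lengths.zip(values).map { case (len, value) =>
      val buf = Array.fill(len)(32.toByte) // 32 == ASCII space
      value.getBytes(encoding).copyToArray(buf)
      new String(buf, encoding)
    }.mkString
    println(s"[$line]") // [ab  cdef    ]
  }
}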
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HiveDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HiveDataSource.scala
deleted file mode 100644
index 7f04b5f..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HiveDataSource.scala
+++ /dev/null
@@ -1,200 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.exception.Exception.InvalidSqlException
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.WriteMode
-import com.github.sharpdata.sharpetl.core.util.Constants.WriteMode.MERGE_WRITE
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{assertNotEmpty, uuid}
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, StringUtil}
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.{autoPurgeHiveTable, sparkSession}
-import com.github.sharpdata.sharpetl.datasource.kafka.DFConversations._
-import com.github.sharpdata.sharpetl.spark.utils.SparkCatalogUtil
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-import scala.jdk.CollectionConverters._
-
-@source(types = Array("hive"))
-@sink(types = Array("hive"))
-class HiveDataSource extends Sink[DataFrame] with Source[DataFrame, SparkSession] {
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- save(df, step.target.asInstanceOf[DBDataSourceConfig].getTableName, step, variables)
- }
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- load(executionContext, step.getSql)
- }
-
- def load(spark: SparkSession, selectSql: String): DataFrame = {
- // work-around for https://issues.apache.org/jira/browse/SPARK-38173
- // Fixed in spark 3.3.0 in https://github.com/apache/spark/commit/1ef5638177dcf06ebca4e9b0bc88401e0fce2ae8
- if (selectSql.contains("+.+")) {
- sparkSession.sql("SET spark.sql.parser.quotedRegexColumnNames=true")
- val df = spark.sql(selectSql)
- sparkSession.sql("SET spark.sql.parser.quotedRegexColumnNames=false")
- df
- } else {
- spark.sql(selectSql)
- }
- }
-
- def save(df: DataFrame, targetTable: String, step: WorkflowStep, variables: Variables): Unit = {
- val writeMode: String = step.writeMode
- val dbName = step.target.asInstanceOf[DBDataSourceConfig].dbName
- val resultTempTable = s"$dbName.$uuid"
- /*if (step.logDrivenType == DIFF) {
- //this column MUST be partition column with name configured by [[com.github.sharpdata.sharpetl.core.util.ETLConfig.partitionColumn]]
- df.withColumn(partitionColumn, lit(variables(partitionColumn)))
- }*/
- if (!df.isEmpty) {
- try {
- df.write.mode(WriteMode.OVER_WRITE).saveAsTable(resultTempTable)
- ETLLogger.info(s"Saved data to temp table $resultTempTable")
- val insertSql = buildInsertSql(
- resultTempTable,
- df.schema.fieldNames.map(_.toLowerCase),
- dbName,
- targetTable,
- writeMode,
- step
- )
- ETLLogger.info(s"""[$targetTable] Insert Sql: $insertSql""")
- sparkSession.sql(insertSql)
- } finally {
- autoPurgeHiveTable(resultTempTable)
- sparkSession.sql(s"drop table if exists $resultTempTable")
- }
- } else {
- ETLLogger.error(s"Source is empty, nothing need to be written into target table $targetTable")
- }
- }
-
- private def buildInsertSql(sourceTable: String,
- sourceSchema: Array[String],
- targetDb: String,
- targetTable: String,
- writeMode: String,
- step: WorkflowStep): String = {
- assertNotEmpty(targetDb, "targetDb")
- assertNotEmpty(targetTable, "targetTable")
- val prefix = if (isNullOrEmpty(targetDb)) StringUtil.EMPTY else s"$targetDb."
- val fullTablePath = s"$prefix$targetTable"
- autoPurgeHiveTable(fullTablePath)
- val targetPartitionColNames = SparkCatalogUtil.getPartitionColNames(targetDb, targetTable)
- val targetNonePartitionColNames = SparkCatalogUtil.getNonePartitionColNames(targetDb, targetTable)
- val targetAllColNames = Array.concat(targetNonePartitionColNames, targetPartitionColNames)
- val partitionClause = if (targetPartitionColNames.nonEmpty) {
- s"partition ${targetPartitionColNames.mkString("(", ", ", ")")}"
- } else {
- ""
- }
- verifySchema(sourceSchema, targetAllColNames)
-
- val selectClause = if (step.isUseTargetSchema.equals(true.toString)) {
- buildInsertSqlByTargetSchema(sourceSchema, targetAllColNames)
- } else {
- buildInsertSqlByComparingWithTargetSchema(sourceSchema, targetAllColNames)
- }
-
- val saveMode = writeMode match {
- case WriteMode.OVER_WRITE | MERGE_WRITE =>
- "overwrite"
- case WriteMode.APPEND =>
- "into"
- }
-
- s"""
- |insert $saveMode table $fullTablePath $partitionClause
- |select * from (
- |select $selectClause from $sourceTable
- |${if (writeMode == MERGE_WRITE) selfUnionClause(sourceTable, fullTablePath, targetPartitionColNames, writeMode, step) else StringUtil.EMPTY} )
- |distribute by ${(targetPartitionColNames ++ List("rand()")).mkString(",")}
- |""".stripMargin
- }
-
- private def verifySchema(sourceSchema: Array[String], targetAllColNames: Array[String]): Unit = {
- val sourceSet = sourceSchema.map(_.toLowerCase()).toSet
- val targetSet = targetAllColNames.map(_.toLowerCase()).toSet
- if (sourceSet.diff(targetSet).nonEmpty && targetSet.diff(sourceSet).nonEmpty) {
- ETLLogger.warn(
- s"""
- |sourceSchema is not the same as targetSchema.
- |
- |sourceSchema - targetSchema: ${sourceSet.diff(targetSet)}
- |targetSchema - sourceSchema: ${targetSet.diff(sourceSet)}
- |""".stripMargin)
- }
- }
-
- def buildInsertSqlByComparingWithTargetSchema(sourceSchema: Array[String], targetSchema: Array[String]): String = {
- targetSchema.map(targetColumn => {
- if (sourceSchema.contains(targetColumn)) {
- s"`$targetColumn`"
- } else {
- s"null as `$targetColumn`"
- }
- }).mkString(",\n ")
- }
-
- /**
- * Whether to use the target schema by default: null-fill only the columns beyond the
- * source column count when target column size > source column size; otherwise, whenever
- * a target column is absent from the source schema, alias the source column at the same
- * index to the target column at that index.
- */
- def buildInsertSqlByTargetSchema(sourceSchema: Array[String], targetSchema: Array[String]): String = {
- targetSchema.indices.map(idx => {
- if (idx >= sourceSchema.length) {
- s"null as `${targetSchema(idx)}`"
- } else {
- if (sourceSchema.contains(targetSchema(idx))) {
- s"`${targetSchema(idx)}` as `${targetSchema(idx)}`"
- } else {
- s"`${sourceSchema(idx)}` as `${targetSchema(idx)}`"
- }
- }
- }).mkString(",\n ")
- }
-
- def selfUnionClause(sourceTable: String, fullTablePath: String, targetPartitionColNames: Array[String],
- writeMode: String, step: WorkflowStep): String = {
- // idempotent support for [[MERGE_WRITE]] mode:
- // not select the (physical or logic) partition from target table, so we could safely overwrite the data of that partition
- val idempotentQueryClause = writeMode match {
- case MERGE_WRITE =>
- if (step.sql.toLowerCase.contains("where")) {
- s"""!(${step.sql.toLowerCase.split("where").reverse.head.trim.stripSuffix(";")})"""
- } else {
- throw InvalidSqlException("Where clause must exist in sql when using `mergewrite` mode!")
- }
- case _ =>
- StringUtil.EMPTY
- }
- val partitionWhereClause =
- sparkSession.sql(s"select ${targetPartitionColNames.mkString("distinct(", ", ", ")")} from $sourceTable")
- .toLocalIterator()
- .asScala
- .map(row => {
- val value = row.toSeq.mkString(",")
- val seq = value.substring(1, value.length - 1).split(",")
- targetPartitionColNames.zipWithIndex.map {
- case (col, idx) => s"$col = ${seq(idx)}"
- }.mkString("(", " and ", ")")
- })
- .mkString("(", " or ", ")")
- s"""union all
- |select * from $fullTablePath where $partitionWhereClause and $idempotentQueryClause""".stripMargin
- }
-}
-
-@source(types = Array("temp"))
-@sink(types = Array("temp"))
-class TempDataSource extends HiveDataSource {
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- df.createOrReplaceTempView(step.target.asInstanceOf[DBDataSourceConfig].getTableName)
- }
-}
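A worked example (hypothetical column names) of the two select-clause builders above; note how buildInsertSqlByTargetSchema renames by index when a target column is missing from the source, while the comparing variant falls back to nulls.

```scala
val source = Array("id", "nm", "age")
val target = Array("id", "name", "age", "created_at")

// buildInsertSqlByComparingWithTargetSchema(source, target):
//   `id`,
//   null as `name`,        // "name" not in source
//   `age`,
//   null as `created_at`
//
// buildInsertSqlByTargetSchema(source, target):
//   `id` as `id`,
//   `nm` as `name`,        // same-index rename
//   `age` as `age`,
//   null as `created_at`   // target is wider than source
```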
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpDataSource.scala
deleted file mode 100644
index e7be34e..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpDataSource.scala
+++ /dev/null
@@ -1,176 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.github.sharpdata.sharpetl.core.annotation.source
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Source
-import com.github.sharpdata.sharpetl.core.datasource.config.HttpDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import com.github.sharpdata.sharpetl.spark.utils.{ETLSparkSession, HttpStatusUtils}
-import com.google.common.base.Strings.isNullOrEmpty
-import com.jayway.jsonpath.JsonPath
-import net.minidev.json.JSONArray
-import org.apache.http.HttpHost
-import org.apache.http.client.config.RequestConfig
-import org.apache.http.client.methods.{HttpGet, HttpPost, HttpRequestBase}
-import org.apache.http.entity.StringEntity
-import org.apache.http.impl.client._
-import org.apache.http.util.EntityUtils
-import org.apache.spark.sql.types.{StringType, StructField, StructType}
-import org.apache.spark.sql.{DataFrame, Row, SparkSession}
-
-import java.net.URLEncoder
-import scala.util.Using
-
-@source(types = Array("http"))
-class HttpDataSource extends Source[DataFrame, SparkSession] {
-
- var httpClient: CloseableHttpClient = _
-
- lazy val mapper = new ObjectMapper
-
- def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- val config = step.getSourceConfig[HttpDataSourceConfig]
- val responseBody = getHttpResponseBody(config)
- transformerDF(responseBody, config)
- }
-
- private def getHttpResponseBody(config: HttpDataSourceConfig): String = {
-
- val httpProperties = HttpProperties.initHttpProperties(config)
- val httpRequest = httpProperties.initRequest()
- ETLLogger.info(s"url: ${httpRequest.getURI.toString}")
- if (httpClient == null) {
- httpClient = HttpClients.createDefault()
- }
- Using(httpClient.execute(httpRequest)) { response =>
- if (!HttpStatusUtils.isSuccessful(response.getStatusLine)) {
- throw new HttpBadRequestException(s"${httpRequest.getURI.toString} failed with statusCode ${response.getStatusLine.getStatusCode}")
- }
- val resp = EntityUtils.toString(response.getEntity)
- val LOG_MAX_LENGTH = 10000
- ETLLogger.info(s"response length: ${resp.length}")
- ETLLogger.info(if (resp.length > LOG_MAX_LENGTH) {
- resp.substring(0, LOG_MAX_LENGTH)
- } else {
- resp
- })
- resp
- }.get
- }
-
- private def transformerDF(value: String, config: HttpDataSourceConfig): DataFrame = {
- val fieldName = config.fieldName
- val jsonPath = config.jsonPath
- val splitBy = config.splitBy
-
- var result = value
- if (jsonPath != "$" || splitBy != "") {
- val values = JsonPath.read[Object](value, jsonPath)
- result = values match {
- case jsonArray: JSONArray =>
- if (splitBy != "") {
- jsonArray.toArray()
- .map {
- case s: String => s
- case o: Any => mapper.writeValueAsString(o)
- }
- .mkString(splitBy)
- } else {
- jsonArray.toJSONString()
- }
- case _ =>
- if (jsonPath != "$") {
- mapper.writeValueAsString(values)
- } else {
- value
- }
- }
- }
-
- val row = Row(result)
- val schema = StructType(List(StructField(fieldName, StringType, nullable = false)))
- ETLSparkSession
- .sparkSession
- .createDataFrame(ETLSparkSession.sparkSession.sparkContext.parallelize(Seq(row)), schema)
- }
-
-}
-
-object HttpProperties {
-
- def getEncodeUrl(url: String): String = {
- if (url == null) {
- throw new HttpBadVariableException("HttpRequest url can not be null")
- }
- if (!url.contains("?")) url else handleUrl(url)
- }
-
- private def handleUrl(url: String): String = {
- val arguments = url.split("\\?")
- val (baseUrl, requestParameters) = (arguments(0) + "?", arguments(1))
- val params = requestParameters.split("&")
- baseUrl + params.map(encodeParams).mkString("&")
- }
-
- private def encodeParams(it: String): String = {
- val keyAndValue = it.split("=")
- s"""${URLEncoder.encode(keyAndValue(0), "UTF-8")}=${URLEncoder.encode(keyAndValue(1), "UTF-8")}"""
- }
-
- def initHttpProperties(config: HttpDataSourceConfig): HttpProperties = {
- val url = getEncodeUrl(config.url)
- val httpMethod = config.httpMethod
- val connectionName = config.connectionName
- if (!isNullOrEmpty(connectionName)) {
- val httpConnectionProperties = ETLConfig.getHttpProperties(connectionName)
- val headers = httpConnectionProperties
- .filter(_._1.startsWith("header."))
- .map { case (key, value) => key.substring("header.".length, key.length) -> value }
-
- val proxyProperties = httpConnectionProperties
- .filter(_._1.startsWith("proxy."))
- .map { case (key, value) => key.substring("proxy.".length, key.length) -> value }
- val proxyHost = proxyProperties.get("host")
- if (proxyHost.isEmpty) {
- throw new HttpBadVariableException("Http proxy host can not be null")
- }
- val proxyPort = proxyProperties.getOrElse("port", "8080").toInt
- val proxy = new HttpHost(proxyHost.get, proxyPort)
- new HttpProperties(url, httpMethod, headers, Option(proxy), config)
- } else {
- new HttpProperties(url, httpMethod, Map.empty, Option.empty, config)
- }
- }
-}
-
-class HttpProperties(url: String, httpMethod: String, headers: Map[String, String], proxy: Option[HttpHost], config: HttpDataSourceConfig) {
-
- def initRequest(): HttpRequestBase = {
- val httpRequest = httpMethod.toUpperCase match {
- case "GET" => new HttpGet(url)
- case _ =>
- val httpPost = new HttpPost(url)
- if (!isNullOrEmpty(config.requestBody)) {
- val requestBody = config.requestBody
- ETLLogger.info(s"request the $url with the body $requestBody")
- httpPost.setEntity(new StringEntity(requestBody))
- httpPost.addHeader("Content-type", "application/json")
- }
- httpPost
- }
- if (headers.nonEmpty) {
- headers.foreach((entry: (String, String)) => httpRequest.addHeader(entry._1, entry._2))
- }
- if (proxy.isDefined) {
- httpRequest.setConfig(RequestConfig.custom().setProxy(proxy.get).build())
- }
- httpRequest
- }
-}
-
-class HttpBadVariableException(message: String) extends RuntimeException(message)
-
-class HttpBadRequestException(message: String) extends RuntimeException(message)
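A small standalone sketch of the query-string handling in HttpProperties.getEncodeUrl: the base URL is kept verbatim and each key=value pair is URL-encoded individually (values containing a literal '&' would need a more careful parser).

```scala
import java.net.URLEncoder

def encodeQuery(url: String): String =
  if (!url.contains("?")) url
  else {
    val Array(base, query) = url.split("\\?", 2)
    base + "?" + query.split("&").map { pair =>
      val Array(k, v) = pair.split("=", 2)
      s"${URLEncoder.encode(k, "UTF-8")}=${URLEncoder.encode(v, "UTF-8")}"
    }.mkString("&")
  }

// encodeQuery("http://host/api?q=a b&tag=x+y")
//   -> "http://host/api?q=a+b&tag=x%2By"
```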
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpFileDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpFileDataSource.scala
deleted file mode 100644
index ff36c73..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpFileDataSource.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation.source
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Source
-import com.github.sharpdata.sharpetl.core.datasource.config.HttpFileDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, HDFSUtil}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.commons.io.{FileUtils, FilenameUtils}
-import org.apache.http.HttpResponse
-import org.apache.http.client.{ClientProtocolException, ResponseHandler}
-import org.apache.http.impl.client._
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-import java.io.{File, IOException}
-import java.nio.file.Paths
-
-// $COVERAGE-OFF$
-@source(types = Array("http_file"))
-class HttpFileDataSource extends Source[DataFrame, SparkSession] {
- var httpClient: CloseableHttpClient = _
-
- def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- val config = step.getSourceConfig[HttpFileDataSourceConfig]
- downloadFile(config)
- ETLSparkSession.sparkSession.emptyDataFrame
- }
-
-
- private def downloadFile(config: HttpFileDataSourceConfig): File = {
-
- val httpProperties = HttpProperties.initHttpProperties(config)
- val httpRequest = httpProperties.initRequest()
- ETLLogger.info(s"url: ${httpRequest.getURI.toString}")
- if (httpClient == null) {
- httpClient = HttpClients.createDefault()
- }
-
- val descDirPath = config.tempDestinationDir
- val sourceFileName = FilenameUtils.getName(httpRequest.getURI.toString)
- val localDescPath = Paths.get(descDirPath, sourceFileName)
- httpClient.execute(httpRequest, new HttpDownloadResponseHandler(localDescPath.toFile))
-
- val hdfsDir = config.hdfsDir
- val hdfsDescPath = Paths.get(hdfsDir, sourceFileName).toString
- ETLLogger.info(s"upload the local file ${localDescPath.toString} to hdfs ${hdfsDescPath}")
- HDFSUtil.moveFromLocal(localDescPath.toString, hdfsDescPath)
-
- localDescPath.toFile
- }
-
-}
-
-class HttpDownloadResponseHandler(val target: File) extends ResponseHandler[File] {
- @throws[ClientProtocolException]
- @throws[IOException]
- def handleResponse(response: HttpResponse): File = {
- val source = response.getEntity.getContent
- FileUtils.copyInputStreamToFile(source, this.target)
- this.target
- }
-
-}
-
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/JsonDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/JsonDataSource.scala
deleted file mode 100644
index a0bf313..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/JsonDataSource.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Source
-import com.github.sharpdata.sharpetl.core.datasource.config.JsonDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.StringUtil
-import com.github.sharpdata.sharpetl.core.annotation._
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-@source(types = Array("json"))
-class JsonDataSource extends Source[DataFrame, SparkSession] {
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- val sourceConfig = step.getSourceConfig[JsonDataSourceConfig]
- executionContext
- .read
- .option("multiline", sourceConfig.getMultiline.toBoolean)
- .option("mode", sourceConfig.getMode)
- .options(sourceConfig.getOptions())
- .json(sourceConfig.getFilePath)
- .selectExpr(
- s"'${StringUtil.getFileNameFromPath(sourceConfig.getFilePath)}' as file_name",
- "*"
- )
- }
-}
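The reader above reduces to the following Spark calls (session setup and the input path are hypothetical); the only twist is the literal file_name column prepended to the data.

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").getOrCreate()
val filePath = "/tmp/in/users.json" // hypothetical input

val df = spark.read
  .option("multiline", true)    // corresponds to getMultiline.toBoolean
  .option("mode", "PERMISSIVE") // corresponds to getMode
  .json(filePath)
  .selectExpr(s"'${filePath.split("/").last}' as file_name", "*")
```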
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ScpDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ScpDataSource.scala
deleted file mode 100644
index 9695738..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/ScpDataSource.scala
+++ /dev/null
@@ -1,160 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.spark.utils.JSchUtil.{disconnect, exec, getSession}
-import com.jcraft.jsch.Session
-import com.github.sharpdata.sharpetl.core.datasource.config.TextFileDataSourceConfig
-import com.github.sharpdata.sharpetl.core.datasource.connection.ScpConnection
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger, HDFSUtil, IOUtil, StringUtil}
-import org.apache.hadoop.io.compress.CompressionCodecFactory
-import org.apache.ivy.plugins.repository.ssh.Scp
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-object ScpDataSource {
-
- def load(
- spark: SparkSession,
- step: WorkflowStep,
- job: JobLog): DataFrame = {
- val sourceConfig = step.getSourceConfig[TextFileDataSourceConfig]
- val prefix = sourceConfig.getConfigPrefix
- val jschConfig = new ScpConnection(prefix)
-
- val remoteFilePath = sourceConfig.getFilePath
- val fileName = StringUtil.getFileNameFromPath(remoteFilePath)
- val localTempFilePath = StringUtil.concatFilePath(jschConfig.getLocalTempDir, fileName)
- val hdfsTempFilePath = CompressionCodecFactory.removeSuffix(
- StringUtil.concatFilePath(jschConfig.getHdfsTempDir, fileName),
- sourceConfig.codecExtension
- )
-
- IOUtil.mkdirs(jschConfig.getLocalTempDir)
- scpFrom(jschConfig, remoteFilePath, localTempFilePath)
-
- HDFSUtil.mkdirs(jschConfig.getHdfsTempDir)
- HDFSUtil.put(
- localTempFilePath,
- hdfsTempFilePath,
- sourceConfig.getCodecExtension,
- sourceConfig.getDecompress.toBoolean
- )
- IOUtil.delete(localTempFilePath)
-
- sourceConfig.setFilePath(hdfsTempFilePath)
- //TODO: [[com.github.sharpdata.sharpetl.common.model.LoadFileLog]]
- /*val loadFileLog = new LoadFileLog
- loadFileLog.setFilePath(remoteFilePath)
- loadFileLog.setTaskId(job.jobId)
- if (!source.getOnlyOneName.toBoolean) {
- TaskDataAccessor.newLoadFileLog(loadFileLog)
- }*/
- spark.emptyDataFrame
- }
-
- def listFilePath(step: WorkflowStep): List[String] = {
- val session = try {
- getSession(step.getSourceConfig[TextFileDataSourceConfig].getConfigPrefix)
- } catch {
- case e: Exception =>
- ETLLogger.error("Get Session failed.", e)
- throw e
- }
- try {
- listFilePath(session, step)
- } finally {
- disconnect(session)
- }
- }
-
- def listFilePath(session: Session, step: WorkflowStep): List[String] = {
- val sourceConfig = step.getSourceConfig[TextFileDataSourceConfig]
- var dir = ETLConfig.getProperty(s"${sourceConfig.getConfigPrefix}.scp.dir")
- if (sourceConfig.getFileDir != null && sourceConfig.getFileDir != "") {
- dir = sourceConfig.getFileDir
- }
- listFilePath(
- session,
- dir,
- sourceConfig.getFileNamePattern
- )
- }
-
- def listFilePath(
- session: Session,
- dir: String,
- fileNamePattern: String): List[String] = {
- val command =
- s"""
- |cd $dir
- |ls
- |""".stripMargin
- val ret = exec(session, command)
- ret
- .last
- .split("\n")
- .flatMap(_.split("\t").flatMap(_.split(" ").map(_.trim)))
- .filter(fileName => fileNamePattern.r.findFirstMatchIn(fileName).isDefined)
- .map(fileName => StringUtil.concatFilePath(dir, fileName))
- .toList
- }
-
- def scpFrom(
- prefix: String,
- remoteFile: String,
- localTarget: String): String = {
- val jschConfig = new ScpConnection(prefix)
- scpFrom(jschConfig, remoteFile, localTarget)
- }
-
- def scpFrom(
- jschConfig: ScpConnection,
- remoteFile: String,
- localTarget: String): String = {
- val session = getSession(jschConfig)
- val scp = new Scp(session)
- try {
- scpFrom(scp, remoteFile, localTarget)
- } catch {
- case e: Exception =>
- ETLLogger.error(s"Scp from remote '$remoteFile' to local '$localTarget' failed.", e)
- throw e
- } finally {
- disconnect(session)
- }
- }
-
- def scpFrom(
- session: Session,
- remoteFile: String,
- localTarget: String): String = {
- val scp = new Scp(session)
- scpFrom(scp, remoteFile, localTarget)
- }
-
- def scpFrom(
- scp: Scp,
- remoteFile: String,
- localTarget: String): String = {
- ETLLogger.info(s"Scp from remote '$remoteFile' to local '$localTarget'")
- scp.get(remoteFile, localTarget)
- localTarget
- }
-
- def delete(
- prefix: String,
- path: String): Unit = {
- val session = getSession(prefix)
- val command = s"rm -f $path"
- exec(session, command)
- disconnect(session)
- }
-
- def delete(
- session: Session,
- path: String): Unit = {
- val command = s"rm -f $path"
- exec(session, command)
- }
-
-}
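A pure-Scala sketch of how listFilePath turns raw `ls` output into matching paths: whitespace-split tokens are filtered against the file-name regex, then joined back onto the directory (sample data below is made up).

```scala
val lsOutput = "a_20240101.csv\nnotes.txt a_20240102.csv"
val fileNamePattern = "a_\\d{8}\\.csv"
val dir = "/data/in"

val files = lsOutput
  .split("\n")
  .flatMap(_.split("\t").flatMap(_.split(" ").map(_.trim)))
  .filter(name => fileNamePattern.r.findFirstMatchIn(name).isDefined)
  .map(name => s"$dir/$name")
  .toList
// files: List(/data/in/a_20240101.csv, /data/in/a_20240102.csv)
```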
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/StreamingDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/StreamingDataSource.scala
deleted file mode 100644
index 0b7cdca..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/StreamingDataSource.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.datasource.kafka.KafkaConfig
-import com.github.sharpdata.sharpetl.core.datasource.config.{DataSourceConfig, StreamingKafkaDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.exception.Exception.UnsupportedStreamingDataSourceException
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import org.apache.spark.sql.Row
-import org.apache.spark.streaming.StreamingContext
-import org.apache.spark.streaming.dstream.DStream
-import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
-import org.apache.spark.streaming.kafka010.KafkaUtils
-import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
-
-object StreamingDataSource {
- def createDStream(step: WorkflowStep,
- jobLog: JobLog,
- streamingContext: StreamingContext
- ): DStream[Row] = {
- val dataSourceConfig: DataSourceConfig = step.getSourceConfig
- dataSourceConfig match {
- case kafkaDataSourceConfig: StreamingKafkaDataSourceConfig =>
- KafkaDataSource.createDStream(
- streamingContext,
- kafkaDataSourceConfig
- )
- case _ =>
- throw UnsupportedStreamingDataSourceException(dataSourceConfig.dataSourceType)
- }
- }
-}
-
-private object KafkaDataSource {
- def createDStream(streamingContext: StreamingContext,
- kafkaDataSourceConfig: StreamingKafkaDataSourceConfig): DStream[Row] = {
- val groupId = kafkaDataSourceConfig.getGroupId
- val topics = kafkaDataSourceConfig.getTopics.split(",").map(_.trim).toSet
-
- val kafkaParams: Map[String, Object] = KafkaConfig.buildNativeKafkaProducerConfig(groupId)
- KafkaUtils
- .createDirectStream[String, String](
- streamingContext,
- PreferConsistent,
- Subscribe[String, String](topics, kafkaParams)
- )
- .map(consumerRecord => Row(consumerRecord.value()))
- }
-}
-
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/TextDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/TextDataSource.scala
deleted file mode 100644
index 7454489..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/TextDataSource.scala
+++ /dev/null
@@ -1,155 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Source
-import com.github.sharpdata.sharpetl.core.datasource.config.{DBDataSourceConfig, TextFileDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.CodecUtil
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.datasource.hive.HiveMetaStoreUtil
-import org.apache.spark.sql.types.StructType
-
-import java.io.File
-
-// scalastyle:off
-import org.apache.hadoop.io.{LongWritable, Text}
-import org.apache.hadoop.mapred.{FileInputFormat, JobConf, TextInputFormat}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.{DataFrame, Row, SparkSession}
-// scalastyle:on
-
-@source(types = Array("text"))
-class TextDataSource extends Source[DataFrame, SparkSession] {
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- load(executionContext, step, jobLog)
- }
-
- private def loadRdd(spark: SparkSession, sourceConfig: TextFileDataSourceConfig): RDD[String] = {
- val jobConf = new JobConf()
-
- val codec = CodecUtil.matchCodec(sourceConfig.getCodecExtension)
- if (codec.isDefined) {
- jobConf.set("io.compression.codecs", codec.get)
- }
- for (key <- sourceConfig.getOptions.keys) {
- jobConf.set(key, sourceConfig.getOptions()(key))
- }
-
- FileInputFormat.setInputPaths(jobConf, sourceConfig.getFilePath)
- spark.sparkContext
- .hadoopRDD(
- jobConf,
- classOf[TextInputFormat],
- classOf[LongWritable],
- classOf[Text]
- )
- .map { case (_, text) =>
- new String(text.getBytes, 0, text.getLength, sourceConfig.getEncoding())
- }
- }
-
- def load(
- spark: SparkSession,
- step: WorkflowStep,
- job: JobLog): DataFrame = {
- val sourceConfig = step.getSourceConfig[TextFileDataSourceConfig]
- val filePath: String = sourceConfig.getFilePath
- val rdd = loadRdd(spark, sourceConfig)
- val dataSourceConfig = step.getTargetConfig[DBDataSourceConfig]
- val targetTable: String = dataSourceConfig.getTableName
- val struct: StructType = HiveMetaStoreUtil.getHiveTableStructType(dataSourceConfig.dbName, targetTable)
- val columnsCount = struct.length - 3
- val fileName = filePath.substring(filePath.lastIndexOf(File.separator) + 1)
- val publicFields = Array(
- job.jobId,
- fileName,
- job.dataRangeEnd
- )
- val rowRDD = if (sourceConfig.getSeparator == null) {
- val fieldLengthConfig = sourceConfig.getFieldLengthConfig
- substrRowRDD(
- rdd,
- sourceConfig.getEncoding,
- fieldLengthConfig,
- sourceConfig.getStrictColumnNum.toBoolean,
- columnsCount,
- publicFields
- )
- } else {
- splitRowRDD(
- rdd,
- sourceConfig.getSeparator,
- columnsCount,
- sourceConfig.getStrictColumnNum.toBoolean,
- publicFields
- )
- }
-
- spark.createDataFrame(rowRDD, struct)
- }
-
- private def substrRowRDD(
- rdd: RDD[String],
- encoding: String,
- fieldLengthConfig: String,
- strictColumnNum: Boolean,
- columnsCount: Int,
- publicFields: Array[String]): RDD[Row] = {
- val fieldLengthArray = fieldLengthConfig
- .split(",")
- .take(columnsCount)
- .map(_.toInt)
- val fieldStartEndIndexArray = fieldLengthArray.scanLeft(0)(_ + _)
- val lineLength = fieldStartEndIndexArray.last
- val filterFunction = if (strictColumnNum) {
- (s: String) => s.getBytes(encoding).length == lineLength
- } else {
- (s: String) => s.getBytes(encoding).length >= lineLength
- }
- rdd
- .filter(filterFunction)
- .map(str => {
- val arr = new Array[String](fieldLengthArray.length)
- val bytes = str.getBytes(encoding)
- for (i <- arr.indices) {
- arr(i) = new String(
- java.util.Arrays.copyOfRange(bytes, fieldStartEndIndexArray(i), fieldStartEndIndexArray(i + 1)),
- encoding
- )
- }
- Row.fromSeq(
- Array.concat(
- arr,
- publicFields
- )
- )
- })
- }
-
- private def splitRowRDD(
- rdd: RDD[String],
- separator: String,
- columnsCount: Int,
- strictColumnNum: Boolean,
- publicFields: Array[String]): RDD[Row] = {
- rdd
- .filter(s => {
- if (strictColumnNum) {
- s.split(separator).length == columnsCount
- } else {
- s.split(separator).length >= columnsCount
- }
- })
- .map(str => {
- Row.fromSeq(
- Array.concat(
- str.split(separator).take(columnsCount).asInstanceOf[Array[String]],
- publicFields
- )
- )
- })
- }
-
-}
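The offset arithmetic in substrRowRDD, isolated as a runnable sketch: scanLeft turns the configured field lengths into cumulative byte offsets, and each field is sliced out of the encoded line with copyOfRange.

```scala
val fieldLengths = Array(3, 5, 2)
val offsets = fieldLengths.scanLeft(0)(_ + _) // Array(0, 3, 8, 10)

val bytes = "abcdefghij".getBytes("UTF-8")
val fields = fieldLengths.indices.map { i =>
  new String(java.util.Arrays.copyOfRange(bytes, offsets(i), offsets(i + 1)), "UTF-8")
}
// fields: Vector("abc", "defgh", "ij")
```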
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/TransformationDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/TransformationDataSource.scala
deleted file mode 100644
index f2583f9..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/TransformationDataSource.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.config.TransformationDataSourceConfig
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.spark.utils.ReflectUtil
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-import java.sql.Timestamp
-import java.time.Instant
-
-@source(types = Array("transformation"))
-@sink(types = Array("transformation"))
-class TransformationDataSource extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- val sourceConfig = step.getSourceConfig[TransformationDataSourceConfig]
-
- val start = "dataRangeStart" -> variables.getOrElse("${DATA_RANGE_START}", "")
- val end = "dataRangeEnd" -> variables.getOrElse("${DATA_RANGE_END}", "")
- val jobId = "jobId" -> variables.getOrElse("'${JOB_ID}'", "")
- val jobName = "workflowName" -> variables.getOrElse("${JOB_NAME}", "")
- val jobTime = "jobTime" -> Timestamp.from(Instant.now()).toString
- val sql = "sql" -> step.sql
-
- ReflectUtil
- .execute(
- sourceConfig.className,
- sourceConfig.methodName,
- sourceConfig.transformerType,
- sourceConfig.args + start + end + jobId + jobTime + sql + jobName)
- .asInstanceOf[DataFrame]
- }
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- val targetConfig = step.getTargetConfig[TransformationDataSourceConfig]
-
- ReflectUtil
- .execute(
- targetConfig.className,
- targetConfig.methodName,
- targetConfig.transformerType,
- df,
- step,
- targetConfig.args,
- variables ++ Map("sql" -> step.sql)
- )
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/UDFDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/UDFDataSource.scala
deleted file mode 100644
index 3e11552..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/UDFDataSource.scala
+++ /dev/null
@@ -1,201 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.config.{ClassDataSourceConfig, PmmlDataSourceConfig, UDFDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.spark.extension.UDFExtension
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-import scala.reflect.runtime.universe.{MethodSymbol, TermName}
-import scala.reflect.runtime.{universe => ru}
-
-case class ClassInfo(
- instanceMirror: ru.InstanceMirror,
- defaultMethodSymbol: ru.MethodSymbol,
- methodSymbols: Map[String, ru.MethodSymbol]) {
- def invoke(args: Any*): Any = {
- instanceMirror.reflectMethod(defaultMethodSymbol)(args: _*)
- }
-}
-
-@source(types = Array("class", "object", "pmml"))
-@sink(types = Array("udf"))
-class UDFConfigExtension extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
- private val METHOD_NAME_APPLY = "apply"
- @transient private val classMirror: ru.Mirror = ru.runtimeMirror(getClass.getClassLoader)
- @transient private val classInfoMap: collection.mutable.Map[String, ClassInfo] =
- collection.mutable.Map[String, ClassInfo]()
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- apply(executionContext, step.getSourceConfig)
- }
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = registerUDF(df.sparkSession, step)
-
- def apply(spark: SparkSession, sourceConfig: ClassDataSourceConfig): DataFrame = {
- val args = getClassArgs(sourceConfig)
- apply(
- sourceConfig.getDataSourceType,
- sourceConfig.getClassName,
- args: _*
- )
- spark.emptyDataFrame
- }
-
- def apply(classType: String, className: String, args: Any*): ClassInfo = this.synchronized {
- val classInfoKey = (classType +: (className +: args)).mkString
- if (!classInfoMap.isDefinedAt(classInfoKey)) {
- val classInfo = classType.toLowerCase match {
- case DataSourceType.CLASS | DataSourceType.PMML =>
- reflectClass(className, args: _*)
- case DataSourceType.OBJECT =>
- reflectObject(className)
- }
- classInfoMap += classInfoKey -> classInfo
- ETLLogger.info(s"Dynamic load class: $classInfo")
- }
- classInfoMap(classInfoKey)
- }
-
- private def buildClassInfo(
- instanceMirror: ru.InstanceMirror,
- methodSymbols: Iterable[MethodSymbol]): ClassInfo = {
- val defaultMethodSymbol = methodSymbols.head
- val methodSymbolMap = methodSymbols
- .map(symbol => symbol.name.toString -> symbol)
- .toMap
- ClassInfo(
- instanceMirror,
- defaultMethodSymbol,
- methodSymbolMap
- )
- }
-
- private def reflectClass(className: String, args: Any*): ClassInfo = {
- val runtimeMirror = ru.runtimeMirror(getClass.getClassLoader)
- val classSymbol = runtimeMirror.staticClass(className).asClass
- val methodSymbols = classSymbol
- .typeSignature
- .members
- .filter(_.isMethod)
- .map(_.asMethod)
- val constructors = methodSymbols.filter(_.isConstructor)
- val constructorMirror = runtimeMirror
- .reflectClass(classSymbol)
- .reflectConstructor(constructors.head.asMethod)
- val instance = constructorMirror()
- val instanceMirror = runtimeMirror.reflect(instance)
- val applyMethodSymbol = methodSymbols
- .find(symbol => symbol.name == TermName(METHOD_NAME_APPLY) &&
- symbol.paramLists.head.size == args.size)
- if (applyMethodSymbol.isDefined) {
- instanceMirror.reflectMethod(applyMethodSymbol.get)(args: _*)
- }
- buildClassInfo(
- instanceMirror,
- methodSymbols
- )
- }
-
- private def reflectObject(className: String): ClassInfo = {
- val moduleSymbol = classMirror.staticModule(className)
- val moduleMirror = classMirror.reflectModule(moduleSymbol)
- val instanceMirror = classMirror.reflect(moduleMirror.instance)
- val methodSymbols = moduleSymbol
- .typeSignature
- .members
- .filter(_.isMethod)
- .map(_.asMethod)
- buildClassInfo(
- instanceMirror,
- methodSymbols
- )
- }
-
- def getClassArgs(sourceConfig: ClassDataSourceConfig): Seq[String] = {
- sourceConfig match {
- case config: PmmlDataSourceConfig =>
- Seq(config.getPmmlFileName)
- case _ =>
- Nil
- }
- }
-
- def registerUDF(spark: SparkSession, step: WorkflowStep): Unit = {
- val sourceConfig = step.getSourceConfig[ClassDataSourceConfig]
- val targetConfig = step.getTargetConfig[UDFDataSourceConfig]
- val args = getClassArgs(sourceConfig)
- UDFExtension.registerUDF(
- spark,
- sourceConfig.getDataSourceType,
- targetConfig.getUdfName,
- sourceConfig.getClassName,
- targetConfig.getMethodName,
- args: _*
- )
- }
-
- def generateFunction(
- classType: String,
- className: String,
- methodName: Option[String],
- argumentsNum: Int,
- classArgs: Any*): AnyRef = {
- lazy val udfClassInfo = this.apply(classType, className, classArgs: _*)
- lazy val instanceMirror = udfClassInfo.instanceMirror
- lazy val methodSymbol = udfClassInfo.methodSymbols(methodName.getOrElse(METHOD_NAME_APPLY))
- argumentsNum match {
- case 0 => new (() => Any) with Serializable {
- override def apply(): Any = {
- try {
- instanceMirror.reflectMethod(methodSymbol)()
- } catch {
- case e: Exception =>
- ETLLogger.error(e.getMessage)
- throw e
- }
- }
- }
- case 1 => new (Any => Any) with Serializable {
- override def apply(v1: Any): Any = {
- try {
- instanceMirror.reflectMethod(methodSymbol)(v1)
- } catch {
- case e: Exception =>
- ETLLogger.error(e.getMessage)
- throw e
- }
- }
- }
- case 2 => new ((Any, Any) => Any) with Serializable {
- override def apply(v1: Any, v2: Any): Any = {
- try {
- instanceMirror.reflectMethod(methodSymbol)(v1, v2)
- } catch {
- case e: Exception =>
- ETLLogger.error(e.getMessage)
- throw e
- }
- }
- }
- case 3 => new ((Any, Any, Any) => Any) with Serializable {
- override def apply(v1: Any, v2: Any, v3: Any): Any = {
- try {
- instanceMirror.reflectMethod(methodSymbol)(v1, v2, v3)
- } catch {
- case e: Exception =>
- ETLLogger.error(e.getMessage)
- throw e
- }
- }
- }
- }
- }
-}
-
-object UDFConfigExtension extends UDFConfigExtension
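The essence of generateFunction, reduced to a sketch: reflection is hidden behind a `Seq[Any] => Any` and wrapped in a serializable FunctionN that Spark can register as a UDF body (only arities 0-2 shown here; the original covers 0-3).

```scala
def wrap(invoke: Seq[Any] => Any, arity: Int): AnyRef = arity match {
  case 0 => new (() => Any) with Serializable {
    override def apply(): Any = invoke(Nil)
  }
  case 1 => new (Any => Any) with Serializable {
    override def apply(v1: Any): Any = invoke(Seq(v1))
  }
  case 2 => new ((Any, Any) => Any) with Serializable {
    override def apply(v1: Any, v2: Any): Any = invoke(Seq(v1, v2))
  }
}
```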
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/VariablesDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/VariablesDataSource.scala
deleted file mode 100644
index d3abf2b..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/VariablesDataSource.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Sink
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.spark.utils.VariablesUtil
-import org.apache.spark.sql.DataFrame
-
-@source(types = Array("variables"))
-@sink(types = Array("variables"))
-class VariablesDataSource extends Sink[DataFrame] {
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- VariablesUtil.setVariables(df, variables)
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/connection/JdbcConnection.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/connection/JdbcConnection.scala
deleted file mode 100644
index 96e543d..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/connection/JdbcConnection.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.connection
-
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, StringUtil}
-import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-
-import java.sql.{Connection, DriverManager}
-
-case class JdbcConnection(connectionName: String, jdbcType: String) {
-
- private val _prefix = StringUtil.getPrefix(connectionName)
-
- private lazy val defaultConfig: Map[String, String] = {
- Map(
- JDBCOptions.JDBC_URL -> ETLConfig.getProperty(s"${_prefix}${jdbcType}.url"),
- "user" -> ETLConfig.getProperty(s"${_prefix}${jdbcType}.user"),
- "password" -> ETLConfig.getProperty(s"${_prefix}${jdbcType}.password"),
- JDBCOptions.JDBC_DRIVER_CLASS -> ETLConfig.getProperty(s"${_prefix}${jdbcType}.driver"),
- // Rows fetched per round trip; a relatively small JDBC fetch size can help performance. Defaults to 10.
- JDBCOptions.JDBC_BATCH_FETCH_SIZE -> ETLConfig.getProperty(s"${_prefix}${jdbcType}.fetchsize")
- )
- }
-
- def getConnection(): Connection = {
- Class.forName(defaultConfig(JDBCOptions.JDBC_DRIVER_CLASS))
- DriverManager.getConnection(defaultConfig(JDBCOptions.JDBC_URL), defaultConfig("user"), defaultConfig("password"))
- }
-
- def buildConfig(options: Map[String, String]): Map[String, String] = {
- options ++ defaultConfig
- }
-
- def getDefaultConfig: Map[String, String] = {
- defaultConfig
- }
-}
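For reference, the property keys this class resolves follow the `<prefix><jdbcType>.<key>` pattern; a hypothetical set of resolved values for connectionName = "sales" and jdbcType = "mysql" would look like this (illustrative only; real values live in application.properties).

```scala
val resolved = Map(
  "url"       -> "jdbc:mysql://db-host:3306/sales", // sales.mysql.url
  "user"      -> "etl",                             // sales.mysql.user
  "password"  -> "***",                             // sales.mysql.password
  "driver"    -> "com.mysql.cj.jdbc.Driver",        // sales.mysql.driver
  "fetchsize" -> "500"                              // sales.mysql.fetchsize
)
```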
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/AbstractJdbcDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/AbstractJdbcDataSource.scala
deleted file mode 100644
index 1f3d3c8..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/AbstractJdbcDataSource.scala
+++ /dev/null
@@ -1,377 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.JdbcDataType._
-import com.github.sharpdata.sharpetl.core.util.Constants.WriteMode.{APPEND, DELETE, EXECUTE, OVER_WRITE, UPSERT}
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, JdbcDefaultOptions}
-import com.github.sharpdata.sharpetl.spark.datasource.jdbc.AbstractJdbcDataSource.addTaskCompletionListener
-import com.github.sharpdata.sharpetl.spark.datasource.connection.JdbcConnection
-import com.github.sharpdata.sharpetl.spark.utils.JdbcUtils.createConnectionFactory
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JdbcUtils}
-import org.apache.spark.sql.types._
-import org.apache.spark.sql.{DataFrame, Row, SparkSession}
-import org.apache.spark.util.LongAccumulator
-
-import java.sql.{Connection, PreparedStatement, ResultSet}
-import scala.util.control.NonFatal
-
-abstract class AbstractJdbcDataSource(jdbcType: String) extends Serializable with Source[DataFrame, SparkSession] with Sink[DataFrame] {
-
- override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
- load(executionContext, step, variables)
- }
-
- override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- save(df, step)
- }
-
- def buildSelectSql(selectSql: String): String = s"($selectSql)"
-
- def load(sparkSession: SparkSession, step: WorkflowStep, variables: Variables): DataFrame = {
- val sourceConfig = step.getSourceConfig[DBDataSourceConfig]
- sourceConfig.setLowerBound(variables.getOrElse("${lowerBound}", sourceConfig.getLowerBound))
- sourceConfig.setUpperBound(variables.getOrElse("${upperBound}", sourceConfig.getUpperBound))
- sourceConfig.setNumPartitions(variables.getOrElse("${numPartitions}", sourceConfig.getNumPartitions))
-
- val connectionName = Option(sourceConfig.getConnectionName).getOrElse(sourceConfig.getDbName)
- assert(!isNullOrEmpty(connectionName))
- val options = Map(
- JDBCOptions.JDBC_TABLE_NAME -> buildSelectSql(step.getSql),
- JDBCOptions.JDBC_NUM_PARTITIONS -> sourceConfig.getNumPartitions,
- JDBCOptions.JDBC_PARTITION_COLUMN -> sourceConfig.getPartitionColumn,
- JDBCOptions.JDBC_UPPER_BOUND -> sourceConfig.getUpperBound,
- JDBCOptions.JDBC_LOWER_BOUND -> sourceConfig.getLowerBound
- ).++(step.source.getOptions)
- .filter(_._2 != null)
-
- load(sparkSession, options, connectionName)
- }
-
- def load(sparkSession: SparkSession, options: Map[String, String], connectionName: String): DataFrame = {
- val jdbcConfig = buildJdbcConfig(connectionName, jdbcType, options)
- if (jdbcConfig("url") == null) {
- throw new IllegalArgumentException(s"url for JDBC connection is missing from application.properties for prefix: $connectionName")
- }
- if (jdbcConfig("driver") == null) {
- throw new IllegalArgumentException(s"driver for JDBC connection is missing from application.properties for prefix: $connectionName")
- }
- val config = jdbcConfig.filterNot(it => isNullOrEmpty(it._2))
- load(sparkSession, config)
- }
-
- private def load(sparkSession: SparkSession,
- jdbcOptions: Map[String, String]): DataFrame = {
- sparkSession
- .read
- .format("jdbc")
- .options(jdbcOptions)
- .load().cache()
- }
-
- // scalastyle:off
- def loadPartition(schema: StructType,
- options: JDBCOptions,
- getConnection: () => Connection,
- sql: String): Iterator[Row] = {
- var closed = false
- var rs: ResultSet = null
- var stmt: PreparedStatement = null
- var conn: Connection = null
-
- def close(): Unit = {
- if (closed) return
- try {
- if (null != rs) {
- rs.close()
- }
- } catch {
- case e: Exception => ETLLogger.error("Exception closing resultset", e)
- }
- try {
- if (null != stmt) {
- stmt.close()
- }
- } catch {
- case e: Exception => ETLLogger.error("Exception closing statement", e)
- }
- try {
- if (null != conn) {
- if (!conn.isClosed && !conn.getAutoCommit) {
- try {
- conn.commit()
- } catch {
- case NonFatal(e) => ETLLogger.error("Exception committing transaction", e)
- }
- }
- conn.close()
- }
- ETLLogger.info("closed connection")
- } catch {
- case e: Exception => ETLLogger.error("Exception closing connection", e)
- }
- closed = true
- }
-
- addTaskCompletionListener(close)
- conn = getConnection()
-
- stmt = conn.prepareStatement(sql,
- ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)
- stmt.setFetchSize(options.fetchSize)
- rs = stmt.executeQuery()
- val result = JdbcUtils.resultSetToRows(rs, schema)
- result
- }
-
- // scalastyle:on
-
- protected val dataTypeMapping: Map[String, DataType] = Map(
- VARCHAR -> StringType,
- CHAR -> StringType,
- DECIMAL -> DoubleType,
- TIMESTAMP -> TimestampType,
- BIGINT -> LongType,
- DOUBLE -> DoubleType,
- NUMERIC -> DoubleType,
- BPCHAR -> StringType
- )
-
- def buildJdbcConfig(connectionName: String, jdbcType: String, options: Map[String, String]): Map[String, String] = {
- JdbcConnection(connectionName, jdbcType).buildConfig(options)
- }
-
- def save(dataFrame: DataFrame, step: WorkflowStep): Unit = {
- val targetConfig = step.getTargetConfig[DBDataSourceConfig]
- val targetTableName = targetConfig.getTableName
- val targetDBName = targetConfig.getDbName
- val (primaryCols, notPrimaryCols) = getCols(targetDBName, targetTableName)
-
- val options = Map(
- JDBCOptions.JDBC_NUM_PARTITIONS -> Option(targetConfig.getNumPartitions).getOrElse(JdbcDefaultOptions.PARTITION_NUM.toString),
- JDBCOptions.JDBC_BATCH_INSERT_SIZE -> Option(targetConfig.getBatchSize).getOrElse(JdbcDefaultOptions.BATCH_SIZE.toString)
- ).++(targetConfig.getOptions)
-
- val jdbcConfig = buildJdbcConfig(targetDBName, jdbcType, Map("dbtable" -> " ") ++ options)
- val getConnection = createConnectionFactory(new JDBCOptions(jdbcConfig))
- val transactionEnabled = targetConfig.transaction.equalsIgnoreCase(true.toString)
- step.getWriteMode match {
- case APPEND =>
- execute(
- getConnection,
- jdbcConfig,
- dataFrame,
- makeInsertSql(targetTableName, primaryCols, notPrimaryCols),
- primaryCols ++ notPrimaryCols,
- transactionEnabled
- )
- case UPSERT =>
- execute(
- getConnection,
- jdbcConfig,
- dataFrame,
- makeUpsertSql(targetTableName, primaryCols, notPrimaryCols),
- makeUpsertCols(primaryCols, notPrimaryCols),
- transactionEnabled
- )
- case DELETE =>
- execute(
- getConnection,
- jdbcConfig,
- dataFrame,
- makeDeleteSql(targetTableName, primaryCols),
- primaryCols,
- transactionEnabled
- )
- case EXECUTE =>
- execute(
- getConnection,
- jdbcConfig,
- dataFrame,
- step.getSql,
- Nil,
- transactionEnabled
- )
- // TODO: change this to overwrite based on the logical primary key, with the logic passed in via parameters
- case OVER_WRITE =>
- getConnection().prepareStatement(s"truncate table $targetTableName;").execute()
- execute(
- getConnection,
- jdbcConfig,
- dataFrame,
- makeInsertSql(targetTableName, primaryCols, notPrimaryCols),
- primaryCols ++ notPrimaryCols,
- transactionEnabled
- )
- case _ =>
- throw new RuntimeException(s"Not Support write mode: ${step.getWriteMode}.")
- }
- }
-
- private def execute(getConnection: () => Connection,
- jdbcConfig: Map[String, String],
- df: DataFrame,
- sql: String,
- cols: Seq[StructField],
- transactionEnabled: Boolean): Unit = {
- execute(
- getConnection,
- jdbcConfig,
- df.rdd,
- sql,
- cols,
- transactionEnabled
- )
- }
-
- private def execute(getConnection: () => Connection,
- jdbcConfig: Map[String, String],
- rdd: RDD[Row],
- sql: String,
- cols: Seq[StructField],
- transactionEnabled: Boolean): Unit = {
- val partitionNum = jdbcConfig.get("numPartitions").map(_.toInt).getOrElse(JdbcDefaultOptions.PARTITION_NUM)
- val saveCount = rdd.sparkContext.longAccumulator
- val startTs = System.currentTimeMillis()
- val batchSize = jdbcConfig.get("batchsize").map(_.toInt).getOrElse(JdbcDefaultOptions.BATCH_SIZE)
- try {
- rdd
- .repartition(partitionNum)
- .foreachPartition(
- savePartition(
- _,
- getConnection,
- batchSize,
- cols,
- sql,
- saveCount,
- transactionEnabled
- )
- )
- } finally {
- val endTs = System.currentTimeMillis()
- ETLLogger.info(
- s"""
- |=======================================
- |sql: $sql
- |count: ${saveCount.value}
- |total time: ${endTs - startTs} ms
- |=======================================
- |""".stripMargin)
- }
- }
-
- def savePartition(rows: Iterator[Row],
- getConnection: () => Connection,
- batchSize: Int,
- cols: Seq[StructField],
- sql: String,
- saveCount: LongAccumulator,
- transactionEnabled: Boolean
- ): Unit = {
- try {
- val conn: Connection = getConnection()
- var committed = false
- var rowCount = 0
- val useTransaction = transactionEnabled && isSupportsTransactions(conn)
-
- try {
- // Enable or disable auto-commit according to the transaction support of the connection
- if (useTransaction) {
- ETLLogger.info("JDBC transaction enabled for current execution.")
- conn.setAutoCommit(false)
- }
- val stmt: PreparedStatement = conn.prepareStatement(sql)
-
- try {
- while (rows.hasNext) {
- val row: Row = rows.next()
- fillValueIntoStatement(stmt, row, cols)
- rowCount += 1
- if (rowCount % batchSize == 0) stmt.executeBatch()
- }
- if (rowCount % batchSize > 0) stmt.executeBatch()
- } finally {
- stmt.close()
- }
- if (useTransaction) {
- ETLLogger.info("Committing JDBC transaction...")
- conn.commit()
- }
- committed = true
- if (saveCount != null) saveCount.add(rowCount)
- } finally {
- try {
- if (!committed) {
- if (useTransaction) {
- ETLLogger.info("JDBC transaction not committed, now starting rollback...")
- conn.rollback()
- }
- }
- } finally {
- conn.close()
- }
- }
- }
- }
-
- def isSupportsTransactions(conn: Connection): Boolean = {
- try {
- conn.getMetaData.supportsDataManipulationTransactionsOnly() ||
- conn.getMetaData.supportsDataDefinitionAndDataManipulationTransactions()
- } catch {
- // rethrow non-fatal exceptions
- case NonFatal(e) =>
- throw e
- }
- }
-
- def fillValueIntoStatement(stmt: PreparedStatement, row: Row, cols: Seq[StructField]): Unit = {
- cols.zipWithIndex.foreach {
- case (field, index) =>
- stmt.setObject(
- index + 1,
- row.getAs(field.name)
- )
- }
- stmt.addBatch()
- }
-
- def makeInsertCols(primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): Seq[StructField] = {
- primaryCols ++ notPrimaryCols
- }
-
- def makeInsertSql(tableName: String,
- primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): String = {
- s"""
- |INSERT INTO ${tableName} (${primaryCols.union(notPrimaryCols).map(_.name).mkString(", ")})
- |VALUES (${primaryCols.union(notPrimaryCols).map(_ => "?").mkString(", ")})
- |""".stripMargin
- }
-
- def getCols(targetDBName: String,
- targetTableName: String): (Seq[StructField], Seq[StructField]) = ???
-
- def makeUpsertCols(primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): Seq[StructField] = ???
-
- def makeUpsertSql(tableName: String,
- primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): String = ???
-
- def makeDeleteSql(tableName: String,
- primaryCols: Seq[StructField]): String = {
- s"""
- |delete from $tableName
- |where ${primaryCols.map(col => col.name.concat(" = ?")).mkString(" and ")}
- |""".stripMargin
- }
-
-}
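A worked example (hypothetical schema) of the SQL templates above, showing the placeholder order that fillValueIntoStatement must match.

```scala
import org.apache.spark.sql.types.{LongType, StringType, StructField}

val primaryCols    = Seq(StructField("id", LongType))
val notPrimaryCols = Seq(StructField("name", StringType), StructField("age", LongType))

// makeInsertSql("users", primaryCols, notPrimaryCols) produces:
//   INSERT INTO users (id, name, age) VALUES (?, ?, ?)
// so the cols passed to execute must be primaryCols ++ notPrimaryCols.
//
// makeDeleteSql("users", primaryCols) produces:
//   delete from users where id = ?
```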
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/ImpalaDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/ImpalaDataSource.scala
deleted file mode 100644
index 6fcd146..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/ImpalaDataSource.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.IMPALA
-import com.github.sharpdata.sharpetl.core.annotation._
-
-@source(types = Array("impala"))
-@sink(types = Array("impala"))
-class ImpalaDataSource extends AbstractJdbcDataSource(IMPALA) {
-
- override def buildSelectSql(selectSql: String): String = s"($selectSql) as t"
-
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/InformixDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/InformixDataSource.scala
deleted file mode 100644
index 5607d94..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/InformixDataSource.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.INFORMIX
-import com.github.sharpdata.sharpetl.core.annotation._
-
-@source(types = Array("informix"))
-@sink(types = Array("informix"))
-class InformixDataSource extends AbstractJdbcDataSource(INFORMIX)
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/MysqlDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/MysqlDataSource.scala
deleted file mode 100644
index f2e7dcd..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/MysqlDataSource.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import com.github.sharpdata.sharpetl.core.exception.Exception.IncompleteDataSourceException
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.MYSQL
-import com.github.sharpdata.sharpetl.core.util.StringUtil
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql.types._
-
-@source(types = Array("mysql", "h2"))
-@sink(types = Array("mysql", "h2"))
-class MysqlDataSource extends AbstractJdbcDataSource(MYSQL) {
-
- override def buildSelectSql(selectSql: String): String = s"($selectSql) as t"
-
- private val PRIMARY_KEY = "pri"
-
- override def getCols(targetDBName: String, targetTableName: String): (Seq[StructField], Seq[StructField]) = {
- var primaryCols = Seq[StructField]()
- var notPrimaryCols = Seq[StructField]()
- if (StringUtil.isNullOrEmpty(targetDBName)) {
- throw IncompleteDataSourceException("MySQL target database name is empty!")
- }
- if (StringUtil.isNullOrEmpty(targetTableName)) {
- throw IncompleteDataSourceException("MySQL target table name is empty!")
- }
- this
- .load(
- ETLSparkSession.getHiveSparkSession(),
- Map(
- "dbtable" ->
- s"""
- |(
- | SELECT a.table_schema as table_schema,
- | a.table_name as table_name,
- | b.column_name as column_name,
- | b.column_type as column_type,
- | b.column_key as column_key
- | FROM information_schema.TABLES a
- | LEFT JOIN information_schema.COLUMNS b ON a.table_name = b.TABLE_NAME AND
- | a.TABLE_SCHEMA = b.TABLE_SCHEMA
- | WHERE a.TABLE_SCHEMA = '$targetDBName'
- | and a.table_name = '$targetTableName'
- | ORDER BY a.table_name, b.ORDINAL_POSITION
- |) as t
- |""".stripMargin
- ),
- targetDBName
- )
- .collect()
- .foreach(row => {
- val field = row.getAs[String]("column_name")
- val dataType = row.getAs[String]("column_type")
- val key = row.getAs[String]("column_key")
- val dataTypePattern = "(\\S*)\\(\\S*\\)".r
-
- val jdbcDataType: String = dataTypePattern.findFirstMatchIn(dataType.trim.toLowerCase) match {
- case Some(data) => data.group(1)
- case _ => ""
- }
-
- val sparkSQLDataType: DataType = dataTypeMapping.getOrElse(jdbcDataType, StringType)
-
- if (key.toLowerCase() == PRIMARY_KEY) {
- primaryCols :+= StructField(field, sparkSQLDataType)
- } else {
- notPrimaryCols :+= StructField(field, sparkSQLDataType)
- }
- })
- (primaryCols, notPrimaryCols)
- }
-
- override def makeUpsertCols(primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): Seq[StructField] = {
- // notPrimaryCols is listed twice on purpose: once for the VALUES placeholders and once for the ON DUPLICATE KEY UPDATE placeholders below
- primaryCols ++ notPrimaryCols ++ notPrimaryCols
- }
-
- override def makeUpsertSql(tableName: String,
- primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): String = {
- s"""
- |INSERT INTO ${tableName} (${primaryCols.union(notPrimaryCols).map(_.name).mkString(", ")})
- |VALUES (${primaryCols.union(notPrimaryCols).map(_ => "?").mkString(", ")})
- |ON DUPLICATE KEY UPDATE ${notPrimaryCols.map(_.name).map(col => s"$col = ?").mkString(", ")}
- |""".stripMargin
- }
-
-}
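
A hedged sketch (invented table and columns) of the upsert this data source builds, showing why makeUpsertCols repeats notPrimaryCols: MySQL's ON DUPLICATE KEY UPDATE clause carries its own "?" per non-key column, so the bind order is primary, non-primary, then non-primary again.

import org.apache.spark.sql.types.{IntegerType, StringType, StructField}

object MysqlUpsertSketch extends App {
  val primaryCols    = Seq(StructField("id", IntegerType))
  val notPrimaryCols = Seq(StructField("name", StringType), StructField("age", IntegerType))

  val upsert =
    s"""
       |INSERT INTO users (${(primaryCols ++ notPrimaryCols).map(_.name).mkString(", ")})
       |VALUES (${(primaryCols ++ notPrimaryCols).map(_ => "?").mkString(", ")})
       |ON DUPLICATE KEY UPDATE ${notPrimaryCols.map(c => s"${c.name} = ?").mkString(", ")}
       |""".stripMargin

  // Five placeholders in total: (id, name, age) for VALUES plus (name, age) for the UPDATE,
  // matching primaryCols ++ notPrimaryCols ++ notPrimaryCols
  println(upsert)
}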
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/OracleDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/OracleDataSource.scala
deleted file mode 100644
index 21eed6f..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/OracleDataSource.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.ORACLE
-import com.github.sharpdata.sharpetl.core.annotation._
-
-@source(types = Array("oracle"))
-@sink(types = Array("oracle"))
-class OracleDataSource extends AbstractJdbcDataSource(ORACLE)
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/PostgresDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/PostgresDataSource.scala
deleted file mode 100644
index 1b531e8..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/PostgresDataSource.scala
+++ /dev/null
@@ -1,150 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.POSTGRES
-import com.github.sharpdata.sharpetl.core.annotation._
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql.types.{DataType, StringType, StructField}
-
-@source(types = Array("postgres"))
-@sink(types = Array("postgres"))
-class PostgresDataSource extends AbstractJdbcDataSource(POSTGRES) {
-
- override def buildSelectSql(selectSql: String): String = s"($selectSql) as t"
-
- private val PRIMARY_KEY = "pri"
-
- override def getCols(targetDBName: String, targetTableName: String): (Seq[StructField], Seq[StructField]) = {
- var primaryCols = Seq[StructField]()
- var notPrimaryCols = Seq[StructField]()
- val (tableSchemaName, tableName) = if (targetTableName.contains('.')) {
- val splittedTableName = targetTableName.split("\\.")
- (splittedTableName(0), splittedTableName(1))
- } else {
- ("public", targetTableName)
- }
- this
- .load(
- ETLSparkSession.getHiveSparkSession(),
- Map(
- "dbtable" ->
- s"""
- |(
- |SELECT a.attnum
- | , a.attname AS field
- | , pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type
- | , t.typname AS type
- | , a.attlen AS length
- | , a.atttypmod AS lengthvar
- | , a.attnotnull AS notnull
- | , b.description AS comment
- | , CASE WHEN pk.field is not null THEN 'PRI' ELSE '' END AS key
- |FROM pg_attribute a
- | inner join pg_class c on a.attrelid = c.oid and a.attnum > 0
- | LEFT JOIN pg_description b
- | ON a.attrelid = b.objoid
- | AND a.attnum = b.objsubid
- | LEFT JOIN pg_constraint e
- | ON a.attrelid = e.conrelid
- | AND e.contype = 'p'
- | AND a.attnum = e.conkey[1]
- | inner join pg_type t on a.atttypid = t.oid
- | inner join pg_tables d on d.tablename = c.relname
- | left join (SELECT pg_attribute.attname as field
- | FROM pg_index,
- | pg_class,
- | pg_attribute,
- | pg_namespace
- | WHERE pg_class.oid = '${appendSchema(tableSchemaName, processUpperCaseTableName(tableName))}'::regclass
- | AND indrelid = pg_class.oid
- | AND nspname = '$tableSchemaName'
- | AND pg_class.relnamespace = pg_namespace.oid
- | AND pg_attribute.attrelid = pg_class.oid
- | AND pg_attribute.attnum = any (pg_index.indkey)
- | AND indisprimary) pk on pk.field = a.attname
- |WHERE 1 = 1
- | AND d.schemaname = '$tableSchemaName'
- | AND c.relname = '${trimTableName(tableName)}'
- | AND c.oid = '${appendSchema(tableSchemaName, processUpperCaseTableName(tableName))}'::regclass
- |ORDER BY a.attnum
- |) as t
- |""".stripMargin
- ),
- targetDBName
- )
- .collect()
- .foreach(row => {
- val field = row.getAs[String]("field")
- val dataType = row.getAs[String]("type")
- val key = row.getAs[String]("key")
- val sparkSQLDataType: DataType = dataTypeMapping.getOrElse(dataType, StringType)
-
- if (key.toLowerCase() == PRIMARY_KEY) {
- primaryCols :+= StructField(field, sparkSQLDataType)
- } else {
- notPrimaryCols :+= StructField(field, sparkSQLDataType)
- }
- })
- (primaryCols, notPrimaryCols)
- }
-
- override def makeUpsertCols(primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): Seq[StructField] = {
- primaryCols ++ notPrimaryCols
- }
-
- override def makeUpsertSql(tableName: String,
- primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): String = {
- s"""
- |insert into $tableName (${primaryCols.union(notPrimaryCols).map(_.name).mkString(", ")})
- |values (${primaryCols.union(notPrimaryCols).map(_ => "?").mkString(", ")})
- |on conflict (${primaryCols.map(_.name).mkString(", ")})
- |do update
- |set ${notPrimaryCols.map(_.name).map(col => s"$col = excluded.$col").mkString(", ")}
- |""".stripMargin
- }
-
- private def appendSchema(schema: String, table: String) = {
- if (isNullOrEmpty(schema)) {
- table
- } else {
- s"$schema.$table"
- }
- }
-
- private def processUpperCaseTableName(table: String): String = {
- if (table.startsWith("\"")) {
- table
- } else if (table.toLowerCase == table) {
- table
- } else {
- s""""$table""""
- }
- }
-
- private def quote(col: String): String = {
- if (col.startsWith("\"")) {
- col
- } else {
- s""""$col""""
- }
- }
-
- private def trimTableName(table: String): String = {
- if (table.startsWith("\"")) {
- table.replaceAll("\"", "")
- } else {
- table
- }
- }
-
- override def makeInsertSql(tableName: String,
- primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]): String = {
- s"""
- |INSERT INTO $tableName (${primaryCols.union(notPrimaryCols).map(it => quote(it.name)).mkString(", ")})
- |VALUES (${primaryCols.union(notPrimaryCols).map(_ => "?").mkString(", ")})
- |""".stripMargin
- }
-}
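
By contrast with the MySQL variant, a sketch (invented names again) of the Postgres upsert: excluded.col refers back to the values being inserted, so no second round of placeholders is needed and makeUpsertCols stays primaryCols ++ notPrimaryCols.

import org.apache.spark.sql.types.{IntegerType, StringType, StructField}

object PostgresUpsertSketch extends App {
  val primaryCols    = Seq(StructField("id", IntegerType))
  val notPrimaryCols = Seq(StructField("name", StringType))

  val upsert =
    s"""
       |insert into users (${(primaryCols ++ notPrimaryCols).map(_.name).mkString(", ")})
       |values (${(primaryCols ++ notPrimaryCols).map(_ => "?").mkString(", ")})
       |on conflict (${primaryCols.map(_.name).mkString(", ")})
       |do update
       |set ${notPrimaryCols.map(c => s"${c.name} = excluded.${c.name}").mkString(", ")}
       |""".stripMargin

  println(upsert) // two placeholders only: id, name
}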
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/SqlServerDataSource.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/SqlServerDataSource.scala
deleted file mode 100644
index c816f1f..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/SqlServerDataSource.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.MS_SQL_SERVER
-import com.github.sharpdata.sharpetl.core.annotation._
-
-@source(types = Array("ms_sql_server"))
-@sink(types = Array("ms_sql_server"))
-class SqlServerDataSource extends AbstractJdbcDataSource(MS_SQL_SERVER) {
-
- override def buildSelectSql(selectSql: String): String = s"($selectSql) as t"
-
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extension/PmmlUDF.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extension/PmmlUDF.scala
deleted file mode 100644
index 8836c9d..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extension/PmmlUDF.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extension
-
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, IOUtil, StringUtil}
-
-import java.io.FileInputStream
-import java.util
-import org.apache.spark.sql.Row
-import org.dmg.pmml.FieldName
-import org.jpmml.evaluator.{Evaluator, EvaluatorUtil, FieldValue, InputField, LoadingModelEvaluatorBuilder}
-
-import scala.jdk.CollectionConverters._
-
-// $COVERAGE-OFF$
-class PmmlUDF extends Serializable {
- private val pmmlFileRootDir = "pmml"
-
- private var evaluator: Evaluator = _
-
- def apply(pmmlFileName: String): Unit = {
- evaluator = initEvaluator(pmmlFileName)
- }
-
- private def initEvaluator(pmmlFileName: String): Evaluator = {
- val pmmlPath = IOUtil
- .recursiveListFilesFromResource(pmmlFileRootDir)
- .find(path => pmmlFileName == StringUtil.getFileNameFromPath(path))
- val pmmlIs = if (pmmlPath.isDefined) {
- new FileInputStream(pmmlPath.get)
- } else {
- IOUtil.getInputStreamFromJar(pmmlFileName)
- }
- val evaluator: Evaluator = new LoadingModelEvaluatorBuilder()
- .load(pmmlIs)
- .build()
- evaluator.verify()
- ETLLogger.info(String.format(
- "PMML input fields: \n%s",
- evaluator.getInputFields.asScala.mkString("\n\t")
- ))
- ETLLogger.info(String.format(
- "PMML target fields: \n%s",
- evaluator.getTargetFields.asScala.mkString("\n\t")
- ))
- pmmlIs.close()
- evaluator
- }
-
- private def rowToArgumentsMapper(inputFields: util.List[InputField]): Row => util.Map[FieldName, FieldValue] = {
- val mappers = inputFields.asScala.map(inputField => {
- (row: Row, arguments: util.Map[FieldName, FieldValue]) =>
- arguments.put(inputField.getName, inputField.prepare(row.get(row.fieldIndex(inputField.getName.getValue))))
- })
-
- row: Row => {
- val arguments = new util.HashMap[FieldName, FieldValue]()
- mappers.foreach(mapper => {
- mapper(row, arguments)
- })
- arguments
- }
- }
-
- def predict(row: Row): Map[String, String] = {
- val inputFields = evaluator.getInputFields
- val arguments = rowToArgumentsMapper(inputFields)(row)
- val results: util.Map[FieldName, _] = evaluator.evaluate(arguments)
- results
- .asScala
- .map {
- case (targetFieldName, targetFieldValue) =>
- targetFieldName.getValue -> EvaluatorUtil.decode(targetFieldValue).toString
- }
- .toMap
- }
-
-}
-
-// $COVERAGE-ON$
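
A hedged usage sketch for the UDF above. The model file name and input field names (x1, x2) are hypothetical; predict resolves inputs by field name, so the Row needs a schema matching the model's input fields, hence GenericRowWithSchema below (a Spark-internal but commonly used class).

import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.{DoubleType, StructField, StructType}

object PmmlUdfSketch {
  def main(args: Array[String]): Unit = {
    val udf = new PmmlUDF
    udf("model.pmml") // loads from the pmml/ resource directory, falling back to the jar

    val schema = StructType(Seq(StructField("x1", DoubleType), StructField("x2", DoubleType)))
    val row = new GenericRowWithSchema(Array[Any](1.0, 2.0), schema)

    // Returns target field name -> decoded value, both as strings
    udf.predict(row).foreach { case (target, value) => println(s"$target -> $value") }
  }
}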
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extension/UdfInitializer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extension/UdfInitializer.scala
deleted file mode 100644
index bf328aa..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extension/UdfInitializer.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extension
-
-import com.github.sharpdata.sharpetl.core.extension.BuiltInFunctions
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType
-import org.apache.spark.sql.SparkSession
-
-object UdfInitializer extends Serializable {
- /**
- * Init built-in UDFs from [[BuiltInFunctions]]
- */
- lazy val init = { (spark: SparkSession) =>
- Seq("powerNullCheck", "arrayJoin", "top", "flatten", "ifEmpty")
- .foreach(func => {
- UDFExtension.registerUDF(
- spark,
- DataSourceType.OBJECT,
- func,
- "com.github.sharpdata.sharpetl.core.extension.BuiltInFunctions",
- func
- )
- })
- }
-}
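
A small sketch of wiring this in. Note the lazy val memoizes the function itself, not its side effect, so each application re-registers the five UDFs on the given session.

import org.apache.spark.sql.SparkSession

object UdfInitSketch extends App {
  val spark = SparkSession.builder().master("local[*]").getOrCreate()
  UdfInitializer.init(spark) // powerNullCheck, arrayJoin, top, flatten and ifEmpty become callable from spark.sql
}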
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkDatabaseMetaData.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkDatabaseMetaData.scala
deleted file mode 100644
index 1b8b249..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkDatabaseMetaData.scala
+++ /dev/null
@@ -1,362 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.driver
-
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-
-import java.sql.{Connection, DatabaseMetaData, ResultSet, RowIdLifetime}
-
-// scalastyle:off
-class SparkDatabaseMetaData extends DatabaseMetaData {
- override def allProceduresAreCallable(): Boolean = true
-
- override def allTablesAreSelectable(): Boolean = true
-
- override def getURL: String = ""
-
- override def getUserName: String = ""
-
- override def isReadOnly: Boolean = false
-
- override def nullsAreSortedHigh(): Boolean = true
-
- override def nullsAreSortedLow(): Boolean = false
-
- override def nullsAreSortedAtStart(): Boolean = true
-
- override def nullsAreSortedAtEnd(): Boolean = false
-
- override def getDatabaseProductName: String = "spark_sharp_etl"
-
- override def getDatabaseProductVersion: String = sparkSession.version
-
- override def getDriverName: String = "com.github.sharpdata.sharpetl.spark.extra.driver.SparkJdbcDriver"
-
- override def getDriverVersion: String = "0"
-
- override def getDriverMajorVersion: Int = 0
-
- override def getDriverMinorVersion: Int = 0
-
- override def usesLocalFiles(): Boolean = true
-
- override def usesLocalFilePerTable(): Boolean = true
-
- override def supportsMixedCaseIdentifiers(): Boolean = true
-
- override def storesUpperCaseIdentifiers(): Boolean = true
-
- override def storesLowerCaseIdentifiers(): Boolean = true
-
- override def storesMixedCaseIdentifiers(): Boolean = true
-
- override def supportsMixedCaseQuotedIdentifiers(): Boolean = true
-
- override def storesUpperCaseQuotedIdentifiers(): Boolean = true
-
- override def storesLowerCaseQuotedIdentifiers(): Boolean = true
-
- override def storesMixedCaseQuotedIdentifiers(): Boolean = true
-
- override def getIdentifierQuoteString: String = "`"
-
- override def getSQLKeywords: String = ???
-
- override def getNumericFunctions: String = ???
-
- override def getStringFunctions: String = ???
-
- override def getSystemFunctions: String = ???
-
- override def getTimeDateFunctions: String = ???
-
- override def getSearchStringEscape: String = ???
-
- override def getExtraNameCharacters: String = ???
-
- override def supportsAlterTableWithAddColumn(): Boolean = true
-
- override def supportsAlterTableWithDropColumn(): Boolean = true
-
- override def supportsColumnAliasing(): Boolean = true
-
- override def nullPlusNonNullIsNull(): Boolean = true
-
- override def supportsConvert(): Boolean = true
-
- override def supportsConvert(fromType: Int, toType: Int): Boolean = true
-
- override def supportsTableCorrelationNames(): Boolean = true
-
- override def supportsDifferentTableCorrelationNames(): Boolean = true
-
- override def supportsExpressionsInOrderBy(): Boolean = true
-
- override def supportsOrderByUnrelated(): Boolean = true
-
- override def supportsGroupBy(): Boolean = true
-
- override def supportsGroupByUnrelated(): Boolean = true
-
- override def supportsGroupByBeyondSelect(): Boolean = true
-
- override def supportsLikeEscapeClause(): Boolean = true
-
- override def supportsMultipleResultSets(): Boolean = true
-
- override def supportsMultipleTransactions(): Boolean = true
-
- override def supportsNonNullableColumns(): Boolean = true
-
- override def supportsMinimumSQLGrammar(): Boolean = true
-
- override def supportsCoreSQLGrammar(): Boolean = true
-
- override def supportsExtendedSQLGrammar(): Boolean = true
-
- override def supportsANSI92EntryLevelSQL(): Boolean = true
-
- override def supportsANSI92IntermediateSQL(): Boolean = true
-
- override def supportsANSI92FullSQL(): Boolean = true
-
- override def supportsIntegrityEnhancementFacility(): Boolean = true
-
- override def supportsOuterJoins(): Boolean = true
-
- override def supportsFullOuterJoins(): Boolean = true
-
- override def supportsLimitedOuterJoins(): Boolean = true
-
- override def getSchemaTerm: String = ???
-
- override def getProcedureTerm: String = ???
-
- override def getCatalogTerm: String = ???
-
- override def isCatalogAtStart: Boolean = true
-
- override def getCatalogSeparator: String = ???
-
- override def supportsSchemasInDataManipulation(): Boolean = true
-
- override def supportsSchemasInProcedureCalls(): Boolean = true
-
- override def supportsSchemasInTableDefinitions(): Boolean = true
-
- override def supportsSchemasInIndexDefinitions(): Boolean = true
-
- override def supportsSchemasInPrivilegeDefinitions(): Boolean = true
-
- override def supportsCatalogsInDataManipulation(): Boolean = true
-
- override def supportsCatalogsInProcedureCalls(): Boolean = true
-
- override def supportsCatalogsInTableDefinitions(): Boolean = true
-
- override def supportsCatalogsInIndexDefinitions(): Boolean = true
-
- override def supportsCatalogsInPrivilegeDefinitions(): Boolean = true
-
- override def supportsPositionedDelete(): Boolean = true
-
- override def supportsPositionedUpdate(): Boolean = true
-
- override def supportsSelectForUpdate(): Boolean = true
-
- override def supportsStoredProcedures(): Boolean = true
-
- override def supportsSubqueriesInComparisons(): Boolean = true
-
- override def supportsSubqueriesInExists(): Boolean = true
-
- override def supportsSubqueriesInIns(): Boolean = true
-
- override def supportsSubqueriesInQuantifieds(): Boolean = true
-
- override def supportsCorrelatedSubqueries(): Boolean = true
-
- override def supportsUnion(): Boolean = true
-
- override def supportsUnionAll(): Boolean = true
-
- override def supportsOpenCursorsAcrossCommit(): Boolean = true
-
- override def supportsOpenCursorsAcrossRollback(): Boolean = true
-
- override def supportsOpenStatementsAcrossCommit(): Boolean = true
-
- override def supportsOpenStatementsAcrossRollback(): Boolean = true
-
- override def getMaxBinaryLiteralLength: Int = 0
-
- override def getMaxCharLiteralLength: Int = 0
-
- override def getMaxColumnNameLength: Int = 0
-
- override def getMaxColumnsInGroupBy: Int = 0
-
- override def getMaxColumnsInIndex: Int = 0
-
- override def getMaxColumnsInOrderBy: Int = 0
-
- override def getMaxColumnsInSelect: Int = 0
-
- override def getMaxColumnsInTable: Int = 0
-
- override def getMaxConnections: Int = 0
-
- override def getMaxCursorNameLength: Int = 0
-
- override def getMaxIndexLength: Int = 0
-
- override def getMaxSchemaNameLength: Int = 0
-
- override def getMaxProcedureNameLength: Int = 0
-
- override def getMaxCatalogNameLength: Int = 0
-
- override def getMaxRowSize: Int = 0
-
- override def doesMaxRowSizeIncludeBlobs(): Boolean = true
-
- override def getMaxStatementLength: Int = 0
-
- override def getMaxStatements: Int = 0
-
- override def getMaxTableNameLength: Int = 0
-
- override def getMaxTablesInSelect: Int = 0
-
- override def getMaxUserNameLength: Int = 0
-
- override def getDefaultTransactionIsolation: Int = 0
-
- override def supportsTransactions(): Boolean = true
-
- override def supportsTransactionIsolationLevel(level: Int): Boolean = true
-
- override def supportsDataDefinitionAndDataManipulationTransactions(): Boolean = true
-
- override def supportsDataManipulationTransactionsOnly(): Boolean = true
-
- override def dataDefinitionCausesTransactionCommit(): Boolean = true
-
- override def dataDefinitionIgnoredInTransactions(): Boolean = true
-
- override def getProcedures(catalog: String, schemaPattern: String, procedureNamePattern: String): ResultSet = ???
-
- override def getProcedureColumns(catalog: String, schemaPattern: String, procedureNamePattern: String, columnNamePattern: String): ResultSet = ???
-
- override def getTables(catalog: String, schemaPattern: String, tableNamePattern: String, types: Array[String]): ResultSet = ???
-
- override def getSchemas: ResultSet = ???
-
- override def getCatalogs: ResultSet = ???
-
- override def getTableTypes: ResultSet = ???
-
- override def getColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = ???
-
- override def getColumnPrivileges(catalog: String, schema: String, table: String, columnNamePattern: String): ResultSet = ???
-
- override def getTablePrivileges(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = ???
-
- override def getBestRowIdentifier(catalog: String, schema: String, table: String, scope: Int, nullable: Boolean): ResultSet = ???
-
- override def getVersionColumns(catalog: String, schema: String, table: String): ResultSet = ???
-
- override def getPrimaryKeys(catalog: String, schema: String, table: String): ResultSet = ???
-
- override def getImportedKeys(catalog: String, schema: String, table: String): ResultSet = ???
-
- override def getExportedKeys(catalog: String, schema: String, table: String): ResultSet = ???
-
- override def getCrossReference(parentCatalog: String, parentSchema: String, parentTable: String, foreignCatalog: String, foreignSchema: String, foreignTable: String): ResultSet = ???
-
- override def getTypeInfo: ResultSet = ???
-
- override def getIndexInfo(catalog: String, schema: String, table: String, unique: Boolean, approximate: Boolean): ResultSet = ???
-
- override def supportsResultSetType(`type`: Int): Boolean = true
-
- override def supportsResultSetConcurrency(`type`: Int, concurrency: Int): Boolean = true
-
- override def ownUpdatesAreVisible(`type`: Int): Boolean = true
-
- override def ownDeletesAreVisible(`type`: Int): Boolean = true
-
- override def ownInsertsAreVisible(`type`: Int): Boolean = true
-
- override def othersUpdatesAreVisible(`type`: Int): Boolean = true
-
- override def othersDeletesAreVisible(`type`: Int): Boolean = true
-
- override def othersInsertsAreVisible(`type`: Int): Boolean = true
-
- override def updatesAreDetected(`type`: Int): Boolean = true
-
- override def deletesAreDetected(`type`: Int): Boolean = true
-
- override def insertsAreDetected(`type`: Int): Boolean = true
-
- override def supportsBatchUpdates(): Boolean = true
-
- override def getUDTs(catalog: String, schemaPattern: String, typeNamePattern: String, types: Array[Int]): ResultSet = ???
-
- override def getConnection: Connection = ???
-
- override def supportsSavepoints(): Boolean = true
-
- override def supportsNamedParameters(): Boolean = true
-
- override def supportsMultipleOpenResults(): Boolean = true
-
- override def supportsGetGeneratedKeys(): Boolean = true
-
- override def getSuperTypes(catalog: String, schemaPattern: String, typeNamePattern: String): ResultSet = ???
-
- override def getSuperTables(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = ???
-
- override def getAttributes(catalog: String, schemaPattern: String, typeNamePattern: String, attributeNamePattern: String): ResultSet = ???
-
- override def supportsResultSetHoldability(holdability: Int): Boolean = true
-
- override def getResultSetHoldability: Int = 0
-
- override def getDatabaseMajorVersion: Int = 0
-
- override def getDatabaseMinorVersion: Int = 0
-
- override def getJDBCMajorVersion: Int = 0
-
- override def getJDBCMinorVersion: Int = 0
-
- override def getSQLStateType: Int = 0
-
- override def locatorsUpdateCopy(): Boolean = true
-
- override def supportsStatementPooling(): Boolean = true
-
- override def getRowIdLifetime: RowIdLifetime = ???
-
- override def getSchemas(catalog: String, schemaPattern: String): ResultSet = ???
-
- override def supportsStoredFunctionsUsingCallSyntax(): Boolean = true
-
- override def autoCommitFailureClosesAllResultSets(): Boolean = true
-
- override def getClientInfoProperties: ResultSet = ???
-
- override def getFunctions(catalog: String, schemaPattern: String, functionNamePattern: String): ResultSet = ???
-
- override def getFunctionColumns(catalog: String, schemaPattern: String, functionNamePattern: String, columnNamePattern: String): ResultSet = ???
-
- override def getPseudoColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = ???
-
- override def generatedKeyAlwaysReturned(): Boolean = true
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = true
-}
-
-// scalastyle:on
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcConnection.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcConnection.scala
deleted file mode 100644
index ae681cb..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcConnection.scala
+++ /dev/null
@@ -1,120 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.driver
-
-import java.{sql, util}
-import java.sql.{Blob, CallableStatement, Clob, Connection, DatabaseMetaData, NClob, PreparedStatement, SQLWarning, SQLXML, Savepoint, Statement, Struct}
-import java.util.Properties
-import java.util.concurrent.Executor
-import scala.jdk.CollectionConverters._
-
-// scalastyle:off
-class SparkJdbcConnection extends Connection {
-
- override def createStatement(): Statement = new SparkJdbcStatement()
-
- override def prepareStatement(sql: String): PreparedStatement = new SparkJdbcPreparedStatement(sql)
-
- override def prepareCall(sql: String): CallableStatement = null
-
- override def nativeSQL(sql: String): String = sql
-
- override def setAutoCommit(autoCommit: Boolean): Unit = ()
-
- override def getAutoCommit: Boolean = false
-
- override def commit(): Unit = ()
-
- override def rollback(): Unit = ()
-
- override def close(): Unit = ()
-
- override def isClosed: Boolean = false
-
- override def getMetaData: DatabaseMetaData = new SparkDatabaseMetaData()
-
- override def setReadOnly(readOnly: Boolean): Unit = ()
-
- override def isReadOnly: Boolean = false
-
- override def setCatalog(catalog: String): Unit = ()
-
- override def getCatalog: String = ""
-
- override def setTransactionIsolation(level: Int): Unit = ()
-
- override def getTransactionIsolation: Int = 0
-
- override def getWarnings: SQLWarning = null
-
- override def clearWarnings(): Unit = ()
-
- override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = null
-
- override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int): PreparedStatement = null
-
- override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int): CallableStatement = null
-
- override def getTypeMap: util.Map[String, Class[_]] = Map.empty[String, Class[_]].asJava
-
- override def setTypeMap(map: util.Map[String, Class[_]]): Unit = ()
-
- override def setHoldability(holdability: Int): Unit = ()
-
- override def getHoldability: Int = 0
-
- override def setSavepoint(): Savepoint = null
-
- override def setSavepoint(name: String): Savepoint = null
-
- override def rollback(savepoint: Savepoint): Unit = ()
-
- override def releaseSavepoint(savepoint: Savepoint): Unit = ()
-
- override def createStatement(resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): Statement = null
-
- override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): PreparedStatement = null
-
- override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): CallableStatement = null
-
- override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = null
-
- override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = null
-
- override def prepareStatement(sql: String, columnNames: Array[String]): PreparedStatement = null
-
- override def createClob(): Clob = null
-
- override def createBlob(): Blob = null
-
- override def createNClob(): NClob = null
-
- override def createSQLXML(): SQLXML = null
-
- override def isValid(timeout: Int): Boolean = false
-
- override def setClientInfo(name: String, value: String): Unit = ()
-
- override def setClientInfo(properties: Properties): Unit = ()
-
- override def getClientInfo(name: String): String = ""
-
- override def getClientInfo: Properties = null
-
- override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = null
-
- override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = null
-
- override def setSchema(schema: String): Unit = ()
-
- override def getSchema: String = ""
-
- override def abort(executor: Executor): Unit = ()
-
- override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = ()
-
- override def getNetworkTimeout: Int = 0
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = false
-}
-// scalastyle:on
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcDriver.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcDriver.scala
deleted file mode 100644
index 6335b77..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcDriver.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.driver
-
-import com.github.sharpdata.sharpetl.spark.extra.driver.SparkJdbcDriver.INSTANCE
-
-import java.sql.{Connection, Driver, DriverPropertyInfo}
-import java.util.Properties
-import java.util.logging.Logger
-import java.sql.DriverManager
-import java.sql.SQLException
-
-// scalastyle:off
-class SparkJdbcDriver extends Driver {
-
- private var registered = false
-
- private def load() = {
- try if (!registered) {
- registered = true
- DriverManager.registerDriver(INSTANCE)
- }
- catch {
- case e: SQLException =>
- e.printStackTrace()
- }
- INSTANCE
- }
-
- override def connect(url: String, info: Properties): Connection = new SparkJdbcConnection()
-
- override def acceptsURL(url: String): Boolean = url.contains("spark")
-
- override def getPropertyInfo(url: String, info: Properties): Array[DriverPropertyInfo] = Array.empty
-
- override def getMajorVersion: Int = 0
-
- override def getMinorVersion: Int = 0
-
- override def jdbcCompliant(): Boolean = false
-
- override def getParentLogger: Logger = null
-}
-
-object SparkJdbcDriver {
- val INSTANCE = new SparkJdbcDriver()
- INSTANCE.load()
-}
-
-// scalastyle:on
\ No newline at end of file
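
A hedged sketch of going through the driver end to end; it assumes an active Spark session behind ETLSparkSession, and the URL is made up (acceptsURL only checks that it contains "spark").

import java.sql.DriverManager

object SparkDriverSketch extends App {
  SparkJdbcDriver.INSTANCE // touching the companion object runs load(), which registers the driver

  val conn = DriverManager.getConnection("jdbc:spark://local")
  val stmt = conn.prepareStatement("SELECT 1 AS one")
  val rs = stmt.executeQuery() // delegates to sparkSession.sql(...)
  while (rs.next()) println(rs.getInt("one"))
}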
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcPreparedStatement.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcPreparedStatement.scala
deleted file mode 100644
index db73547..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcPreparedStatement.scala
+++ /dev/null
@@ -1,266 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.driver
-
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-
-import java.io.{InputStream, Reader}
-import java.net.URL
-import java.sql._
-import java.time.LocalDateTime
-import java.util.Calendar
-import scala.collection.mutable
-
-// scalastyle:off
-class SparkJdbcPreparedStatement(val sql: String) extends PreparedStatement {
- private var resultSet: SparkJdbcResultSet = null
-
- private val parameterMetaData = mutable.Map[Int, Any]()
-
- def escape(value: String): String = value.replace("'", "")
-
- def buildSql: String = {
- parameterMetaData.toList.sortBy(_._1)
- .foldLeft(sql) {
- case (accSql, (_, value)) =>
- value match {
- case _: Int | _: Boolean => accSql.replaceFirst("\\?", value.toString)
- case _: String => accSql.replaceFirst("\\?", s"\'${escape(value.toString)}\'")
- case time: LocalDateTime => accSql.replaceFirst("\\?", s"\'${time.format(L_YYYY_MM_DD_HH_MM_SS)}\'")
- case _ => accSql.replaceFirst("\\?", s"\'${escape(value.toString)}\'")
- }
- }
- }
-
- override def executeQuery(): ResultSet = {
- println(s"[DRIVER] executing sql $buildSql")
- new SparkJdbcResultSet(sparkSession.sql(buildSql), this)
- }
-
- override def executeUpdate(): Int = {
- println(s"[DRIVER] executing sql $buildSql")
- sparkSession.sql(buildSql)
- 0
- }
-
- override def setNull(parameterIndex: Int, sqlType: Int): Unit = parameterMetaData.put(parameterIndex, 0)
-
- override def setBoolean(parameterIndex: Int, x: Boolean): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setByte(parameterIndex: Int, x: Byte): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setShort(parameterIndex: Int, x: Short): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setInt(parameterIndex: Int, x: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setLong(parameterIndex: Int, x: Long): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setFloat(parameterIndex: Int, x: Float): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setDouble(parameterIndex: Int, x: Double): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setBigDecimal(parameterIndex: Int, x: java.math.BigDecimal): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setString(parameterIndex: Int, x: String): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setBytes(parameterIndex: Int, x: scala.Array[Byte]): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setDate(parameterIndex: Int, x: Date): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setTime(parameterIndex: Int, x: Time): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setUnicodeStream(parameterIndex: Int, x: InputStream, length: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def clearParameters(): Unit = parameterMetaData.clear()
-
- override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setObject(parameterIndex: Int, x: Any): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def execute(): Boolean = {
- executeUpdate()
- true
- }
-
- override def addBatch(): Unit = ()
-
- override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Int): Unit = ()
-
- override def setRef(parameterIndex: Int, x: Ref): Unit = ()
-
- override def setBlob(parameterIndex: Int, x: Blob): Unit = ()
-
- override def setClob(parameterIndex: Int, x: Clob): Unit = ()
-
- override def setArray(parameterIndex: Int, x: java.sql.Array): Unit = ()
-
- override def getMetaData: ResultSetMetaData = ???
-
- override def setDate(parameterIndex: Int, x: Date, cal: Calendar): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setTime(parameterIndex: Int, x: Time, cal: Calendar): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setTimestamp(parameterIndex: Int, x: Timestamp, cal: Calendar): Unit = {
- parameterMetaData.put(parameterIndex, x)
- }
-
- override def setNull(parameterIndex: Int, sqlType: Int, typeName: String): Unit = parameterMetaData.put(parameterIndex, null)
-
- override def setURL(parameterIndex: Int, x: URL): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def getParameterMetaData: ParameterMetaData = ??? // TODO: add null checks so this can be called before execution; setTimestamp threw a NullPointerException when getParameterMetaData() was called on an unexecuted statement
-
- override def setRowId(parameterIndex: Int, x: RowId): Unit = ()
-
- override def setNString(parameterIndex: Int, value: String): Unit = ()
-
- override def setNCharacterStream(parameterIndex: Int, value: Reader, length: Long): Unit = ()
-
- override def setNClob(parameterIndex: Int, value: NClob): Unit = ()
-
- override def setClob(parameterIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def setBlob(parameterIndex: Int, inputStream: InputStream, length: Long): Unit = ()
-
- override def setNClob(parameterIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def setSQLXML(parameterIndex: Int, xmlObject: SQLXML): Unit = ()
-
- override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int, scaleOrLength: Int): Unit = parameterMetaData.put(parameterIndex, x)
-
- override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Long): Unit = ()
-
- override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Long): Unit = ()
-
- override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def setAsciiStream(parameterIndex: Int, x: InputStream): Unit = ()
-
- override def setBinaryStream(parameterIndex: Int, x: InputStream): Unit = ()
-
- override def setCharacterStream(parameterIndex: Int, reader: Reader): Unit = ()
-
- override def setNCharacterStream(parameterIndex: Int, value: Reader): Unit = ()
-
- override def setClob(parameterIndex: Int, reader: Reader): Unit = ()
-
- override def setBlob(parameterIndex: Int, inputStream: InputStream): Unit = ()
-
- override def setNClob(parameterIndex: Int, reader: Reader): Unit = ()
-
- override def executeQuery(sql: String): ResultSet = ???
-
- override def executeUpdate(sql: String): Int = 0
-
- override def close(): Unit = ()
-
- override def getMaxFieldSize: Int = 0
-
- override def setMaxFieldSize(max: Int): Unit = ()
-
- override def getMaxRows: Int = 0
-
- override def setMaxRows(max: Int): Unit = ()
-
- override def setEscapeProcessing(enable: Boolean): Unit = ()
-
- override def getQueryTimeout: Int = 0
-
- override def setQueryTimeout(seconds: Int): Unit = ()
-
- override def cancel(): Unit = ()
-
- override def getWarnings: SQLWarning = null
-
- override def clearWarnings(): Unit = ()
-
- override def setCursorName(name: String): Unit = ()
-
- override def execute(sql: String): Boolean = {
- this.resultSet = new SparkJdbcResultSet(sparkSession.sql(buildSql), this)
- true
- }
-
- override def getResultSet: ResultSet = new SparkJdbcResultSet(sparkSession.sql(buildSql), this)
-
- override def getUpdateCount: Int = -1
-
- override def getMoreResults: Boolean = !this.resultSet.alreadyTheLast()
-
- override def setFetchDirection(direction: Int): Unit = ()
-
- override def getFetchDirection: Int = 0
-
- override def setFetchSize(rows: Int): Unit = ()
-
- override def getFetchSize: Int = 0
-
- override def getResultSetConcurrency: Int = 0
-
- override def getResultSetType: Int = 0
-
- override def addBatch(sql: String): Unit = ()
-
- override def clearBatch(): Unit = ()
-
- override def executeBatch(): scala.Array[Int] = ???
-
- override def getConnection: Connection = ???
-
- override def getMoreResults(current: Int): Boolean = !this.resultSet.alreadyTheLast()
-
- override def getGeneratedKeys: ResultSet = ???
-
- override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = {
- sparkSession.sql(sql)
- 0
- }
-
- override def executeUpdate(sql: String, columnIndexes: scala.Array[Int]): Int = {
- sparkSession.sql(sql)
- 0
- }
-
- override def executeUpdate(sql: String, columnNames: scala.Array[String]): Int = {
- sparkSession.sql(sql)
- 0
- }
-
- override def execute(sql: String, autoGeneratedKeys: Int): Boolean = {
- sparkSession.sql(sql)
- true
- }
-
- override def execute(sql: String, columnIndexes: scala.Array[Int]): Boolean = {
- sparkSession.sql(sql)
- true
- }
-
- override def execute(sql: String, columnNames: scala.Array[String]): Boolean = {
- sparkSession.sql(sql)
- true
- }
-
- override def getResultSetHoldability: Int = 0
-
- override def isClosed: Boolean = false
-
- override def setPoolable(poolable: Boolean): Unit = ()
-
- override def isPoolable: Boolean = false
-
- override def closeOnCompletion(): Unit = ()
-
- override def isCloseOnCompletion: Boolean = false
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = false
-}
-
-// scalastyle:on
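
A tiny sketch of the parameter substitution buildSql performs; it also shows that escape drops single quotes instead of doubling them, so quoted values come out altered rather than SQL-escaped.

object BuildSqlSketch extends App {
  val ps = new SparkJdbcPreparedStatement("SELECT * FROM t WHERE id = ? AND name = ?")
  ps.setInt(1, 7)
  ps.setString(2, "o'brien")
  println(ps.buildSql) // SELECT * FROM t WHERE id = 7 AND name = 'obrien'
}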
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcResultSet.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcResultSet.scala
deleted file mode 100644
index dee7cb5..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcResultSet.scala
+++ /dev/null
@@ -1,426 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.driver
-
-import org.apache.spark.sql.{DataFrame, Row}
-
-import java.io.{InputStream, Reader}
-import java.net.URL
-import java.{sql, util}
-import java.sql.{Blob, Clob, Date, NClob, Ref, ResultSet, ResultSetMetaData, RowId, SQLWarning, SQLXML, Statement, Time, Timestamp}
-import java.time.LocalDateTime
-import java.util.Calendar
-
-// scalastyle:off
-class SparkJdbcResultSet(val data: DataFrame, val statement: Statement) extends ResultSet {
-
- private val datas = data.toLocalIterator()
- private var currentData: Row = null
-
- def alreadyTheLast(): Boolean = (currentData == null) || !datas.hasNext
-
- override def next(): Boolean = {
- val hasNext = datas.hasNext
- if (hasNext) {
- currentData = datas.next
- }
- hasNext
- }
-
- override def close(): Unit = ()
-
- override def wasNull(): Boolean = false
-
- override def getString(columnIndex: Int): String = currentData.get(columnIndex - 1).toString
-
- override def getBoolean(columnIndex: Int): Boolean = currentData.getBoolean(columnIndex - 1)
-
- override def getByte(columnIndex: Int): Byte = currentData.getByte(columnIndex - 1)
-
- override def getShort(columnIndex: Int): Short = currentData.getShort(columnIndex - 1)
-
- override def getInt(columnIndex: Int): Int = {
- try {
- currentData.getInt(columnIndex - 1)
- } catch {
- case _: Exception => 0
- }
- }
-
- override def getLong(columnIndex: Int): Long = currentData.getLong(columnIndex - 1)
-
- override def getFloat(columnIndex: Int): Float = currentData.getFloat(columnIndex - 1)
-
- override def getDouble(columnIndex: Int): Double = currentData.getDouble(columnIndex - 1)
-
- override def getBigDecimal(columnIndex: Int, scale: Int): java.math.BigDecimal = currentData.getDecimal(columnIndex - 1)
-
- override def getBytes(columnIndex: Int): Array[Byte] = Array(currentData.getByte(columnIndex - 1))
-
- override def getDate(columnIndex: Int): Date = currentData.getDate(columnIndex - 1)
-
- override def getTime(columnIndex: Int): Time = ??? //currentData.getTimestamp(columnIndex -1)
-
- override def getTimestamp(columnIndex: Int): Timestamp = currentData.getTimestamp(columnIndex - 1)
-
- override def getAsciiStream(columnIndex: Int): InputStream = ???
-
- override def getUnicodeStream(columnIndex: Int): InputStream = ???
-
- override def getBinaryStream(columnIndex: Int): InputStream = ???
-
- override def getString(columnLabel: String): String = currentData.getAs(columnLabel)
-
- override def getBoolean(columnLabel: String): Boolean = currentData.getAs(columnLabel)
-
- override def getByte(columnLabel: String): Byte = currentData.getAs(columnLabel)
-
- override def getShort(columnLabel: String): Short = currentData.getAs(columnLabel)
-
- override def getInt(columnLabel: String): Int = currentData.getAs(columnLabel)
-
- override def getLong(columnLabel: String): Long = currentData.getAs(columnLabel)
-
- override def getFloat(columnLabel: String): Float = currentData.getAs(columnLabel)
-
- override def getDouble(columnLabel: String): Double = currentData.getAs(columnLabel)
-
- override def getBigDecimal(columnLabel: String, scale: Int): java.math.BigDecimal = currentData.getAs(columnLabel)
-
- override def getBytes(columnLabel: String): Array[Byte] = currentData.getAs(columnLabel)
-
- override def getDate(columnLabel: String): Date = currentData.getAs(columnLabel)
-
- override def getTime(columnLabel: String): Time = currentData.getAs(columnLabel)
-
- override def getTimestamp(columnLabel: String): Timestamp = currentData.getAs(columnLabel)
-
- override def getAsciiStream(columnLabel: String): InputStream = ???
-
- override def getUnicodeStream(columnLabel: String): InputStream = ???
-
- override def getBinaryStream(columnLabel: String): InputStream = ???
-
- override def getWarnings: SQLWarning = null
-
- override def clearWarnings(): Unit = ()
-
- override def getCursorName: String = ""
-
- override def getMetaData: ResultSetMetaData = new SparkJdbcResultSetMetaData(data)
-
- override def getObject(columnIndex: Int): AnyRef = null
-
- override def getObject(columnLabel: String): AnyRef = null
-
- override def findColumn(columnLabel: String): Int = currentData.fieldIndex(columnLabel)
-
- override def getCharacterStream(columnIndex: Int): Reader = ???
-
- override def getCharacterStream(columnLabel: String): Reader = ???
-
- override def getBigDecimal(columnIndex: Int): java.math.BigDecimal = currentData.getDecimal(columnIndex - 1)
-
- override def getBigDecimal(columnLabel: String): java.math.BigDecimal = currentData.getAs(columnLabel)
-
- override def isBeforeFirst: Boolean = false
-
- override def isAfterLast: Boolean = false
-
- override def isFirst: Boolean = false
-
- override def isLast: Boolean = false
-
- override def beforeFirst(): Unit = ()
-
- override def afterLast(): Unit = ()
-
- override def first(): Boolean = false
-
- override def last(): Boolean = false
-
- override def getRow: Int = 0
-
- override def absolute(row: Int): Boolean = false
-
- override def relative(rows: Int): Boolean = false
-
- override def previous(): Boolean = false
-
- override def setFetchDirection(direction: Int): Unit = ()
-
- override def getFetchDirection: Int = 0
-
- override def setFetchSize(rows: Int): Unit = ()
-
- override def getFetchSize: Int = 0
-
- override def getType: Int = 0
-
- override def getConcurrency: Int = 0
-
- override def rowUpdated(): Boolean = ???
-
- override def rowInserted(): Boolean = ???
-
- override def rowDeleted(): Boolean = ???
-
- override def updateNull(columnIndex: Int): Unit = ()
-
- override def updateBoolean(columnIndex: Int, x: Boolean): Unit = ()
-
- override def updateByte(columnIndex: Int, x: Byte): Unit = ()
-
- override def updateShort(columnIndex: Int, x: Short): Unit = ()
-
- override def updateInt(columnIndex: Int, x: Int): Unit = ()
-
- override def updateLong(columnIndex: Int, x: Long): Unit = ()
-
- override def updateFloat(columnIndex: Int, x: Float): Unit = ()
-
- override def updateDouble(columnIndex: Int, x: Double): Unit = ()
-
- override def updateBigDecimal(columnIndex: Int, x: java.math.BigDecimal): Unit = ()
-
- override def updateString(columnIndex: Int, x: String): Unit = ()
-
- override def updateBytes(columnIndex: Int, x: Array[Byte]): Unit = ()
-
- override def updateDate(columnIndex: Int, x: Date): Unit = ()
-
- override def updateTime(columnIndex: Int, x: Time): Unit = ()
-
- override def updateTimestamp(columnIndex: Int, x: Timestamp): Unit = ()
-
- override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Int): Unit = ()
-
- override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Int): Unit = ()
-
- override def updateCharacterStream(columnIndex: Int, x: Reader, length: Int): Unit = ()
-
- override def updateObject(columnIndex: Int, x: Any, scaleOrLength: Int): Unit = ()
-
- override def updateObject(columnIndex: Int, x: Any): Unit = ()
-
- override def updateNull(columnLabel: String): Unit = ()
-
- override def updateBoolean(columnLabel: String, x: Boolean): Unit = ()
-
- override def updateByte(columnLabel: String, x: Byte): Unit = ()
-
- override def updateShort(columnLabel: String, x: Short): Unit = ()
-
- override def updateInt(columnLabel: String, x: Int): Unit = ()
-
- override def updateLong(columnLabel: String, x: Long): Unit = ()
-
- override def updateFloat(columnLabel: String, x: Float): Unit = ()
-
- override def updateDouble(columnLabel: String, x: Double): Unit = ()
-
- override def updateBigDecimal(columnLabel: String, x: java.math.BigDecimal): Unit = ()
-
- override def updateString(columnLabel: String, x: String): Unit = ()
-
- override def updateBytes(columnLabel: String, x: Array[Byte]): Unit = ()
-
- override def updateDate(columnLabel: String, x: Date): Unit = ()
-
- override def updateTime(columnLabel: String, x: Time): Unit = ()
-
- override def updateTimestamp(columnLabel: String, x: Timestamp): Unit = ()
-
- override def updateAsciiStream(columnLabel: String, x: InputStream, length: Int): Unit = ()
-
- override def updateBinaryStream(columnLabel: String, x: InputStream, length: Int): Unit = ()
-
- override def updateCharacterStream(columnLabel: String, reader: Reader, length: Int): Unit = ()
-
- override def updateObject(columnLabel: String, x: Any, scaleOrLength: Int): Unit = ()
-
- override def updateObject(columnLabel: String, x: Any): Unit = ()
-
- override def insertRow(): Unit = ()
-
- override def updateRow(): Unit = ()
-
- override def deleteRow(): Unit = ()
-
- override def refreshRow(): Unit = ()
-
- override def cancelRowUpdates(): Unit = ()
-
- override def moveToInsertRow(): Unit = ()
-
- override def moveToCurrentRow(): Unit = ()
-
- override def getStatement: Statement = statement
-
- override def getObject(columnIndex: Int, map: util.Map[String, Class[_]]): AnyRef = null
-
- override def getRef(columnIndex: Int): Ref = null
-
- override def getBlob(columnIndex: Int): Blob = ???
-
- override def getClob(columnIndex: Int): Clob = ???
-
- override def getArray(columnIndex: Int): sql.Array = ???
-
- override def getObject(columnLabel: String, map: util.Map[String, Class[_]]): AnyRef = null
-
- override def getRef(columnLabel: String): Ref = null
-
- override def getBlob(columnLabel: String): Blob = ???
-
- override def getClob(columnLabel: String): Clob = ???
-
- override def getArray(columnLabel: String): sql.Array = ???
-
- override def getDate(columnIndex: Int, cal: Calendar): Date = ???
-
- override def getDate(columnLabel: String, cal: Calendar): Date = ???
-
- override def getTime(columnIndex: Int, cal: Calendar): Time = ???
-
- override def getTime(columnLabel: String, cal: Calendar): Time = ???
-
- override def getTimestamp(columnIndex: Int, cal: Calendar): Timestamp = ???
-
- override def getTimestamp(columnLabel: String, cal: Calendar): Timestamp = ???
-
- override def getURL(columnIndex: Int): URL = ???
-
- override def getURL(columnLabel: String): URL = ???
-
- override def updateRef(columnIndex: Int, x: Ref): Unit = ()
-
- override def updateRef(columnLabel: String, x: Ref): Unit = ()
-
- override def updateBlob(columnIndex: Int, x: Blob): Unit = ()
-
- override def updateBlob(columnLabel: String, x: Blob): Unit = ()
-
- override def updateClob(columnIndex: Int, x: Clob): Unit = ()
-
- override def updateClob(columnLabel: String, x: Clob): Unit = ()
-
- override def updateArray(columnIndex: Int, x: sql.Array): Unit = ()
-
- override def updateArray(columnLabel: String, x: sql.Array): Unit = ()
-
- override def getRowId(columnIndex: Int): RowId = ???
-
- override def getRowId(columnLabel: String): RowId = ???
-
- override def updateRowId(columnIndex: Int, x: RowId): Unit = ()
-
- override def updateRowId(columnLabel: String, x: RowId): Unit = ()
-
- override def getHoldability: Int = 0
-
- override def isClosed: Boolean = false
-
- override def updateNString(columnIndex: Int, nString: String): Unit = ()
-
- override def updateNString(columnLabel: String, nString: String): Unit = ()
-
- override def updateNClob(columnIndex: Int, nClob: NClob): Unit = ()
-
- override def updateNClob(columnLabel: String, nClob: NClob): Unit = ()
-
- override def getNClob(columnIndex: Int): NClob = ???
-
- override def getNClob(columnLabel: String): NClob = ???
-
- override def getSQLXML(columnIndex: Int): SQLXML = ???
-
- override def getSQLXML(columnLabel: String): SQLXML = ???
-
- override def updateSQLXML(columnIndex: Int, xmlObject: SQLXML): Unit = ()
-
- override def updateSQLXML(columnLabel: String, xmlObject: SQLXML): Unit = ()
-
- override def getNString(columnIndex: Int): String = ???
-
- override def getNString(columnLabel: String): String = ???
-
- override def getNCharacterStream(columnIndex: Int): Reader = ???
-
- override def getNCharacterStream(columnLabel: String): Reader = ???
-
- override def updateNCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = ()
-
- override def updateNCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = ()
-
- override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Long): Unit = ()
-
- override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Long): Unit = ()
-
- override def updateCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = ()
-
- override def updateAsciiStream(columnLabel: String, x: InputStream, length: Long): Unit = ()
-
- override def updateBinaryStream(columnLabel: String, x: InputStream, length: Long): Unit = ()
-
- override def updateCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = ()
-
- override def updateBlob(columnIndex: Int, inputStream: InputStream, length: Long): Unit = ()
-
- override def updateBlob(columnLabel: String, inputStream: InputStream, length: Long): Unit = ()
-
- override def updateClob(columnIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def updateClob(columnLabel: String, reader: Reader, length: Long): Unit = ()
-
- override def updateNClob(columnIndex: Int, reader: Reader, length: Long): Unit = ()
-
- override def updateNClob(columnLabel: String, reader: Reader, length: Long): Unit = ()
-
- override def updateNCharacterStream(columnIndex: Int, x: Reader): Unit = ()
-
- override def updateNCharacterStream(columnLabel: String, reader: Reader): Unit = ()
-
- override def updateAsciiStream(columnIndex: Int, x: InputStream): Unit = ()
-
- override def updateBinaryStream(columnIndex: Int, x: InputStream): Unit = ()
-
- override def updateCharacterStream(columnIndex: Int, x: Reader): Unit = ()
-
- override def updateAsciiStream(columnLabel: String, x: InputStream): Unit = ()
-
- override def updateBinaryStream(columnLabel: String, x: InputStream): Unit = ()
-
- override def updateCharacterStream(columnLabel: String, reader: Reader): Unit = ()
-
- override def updateBlob(columnIndex: Int, inputStream: InputStream): Unit = ()
-
- override def updateBlob(columnLabel: String, inputStream: InputStream): Unit = ()
-
- override def updateClob(columnIndex: Int, reader: Reader): Unit = ()
-
- override def updateClob(columnLabel: String, reader: Reader): Unit = ()
-
- override def updateNClob(columnIndex: Int, reader: Reader): Unit = ()
-
- override def updateNClob(columnLabel: String, reader: Reader): Unit = ()
-
- override def getObject[T](columnIndex: Int, `type`: Class[T]): T = ???
-
- override def getObject[T](columnLabel: String, `type`: Class[T]): T = {
- val value: Any = currentData.getAs(columnLabel)
-
- val ldtCls = classOf[LocalDateTime]
-
- `type` match {
- case `ldtCls` => // backticks match against the existing val; a bare lowercase pattern would bind a fresh name and match every type
- // from timestamp => localdatetime
- value.asInstanceOf[Timestamp].toLocalDateTime.asInstanceOf[T]
- case _ => value.asInstanceOf[T]
- }
- }
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = ???
-}
-
-// scalastyle:on
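
A short sketch of draining the cursor above; the DataFrame is built locally and the Statement argument is left null, which the class tolerates as long as getStatement is not used.

import org.apache.spark.sql.SparkSession

object ResultSetSketch extends App {
  val spark = SparkSession.builder().master("local[*]").getOrCreate()
  import spark.implicits._

  val df = Seq((1, "a"), (2, "b")).toDF("id", "name")
  val rs = new SparkJdbcResultSet(df, null)

  while (rs.next()) {
    println(s"${rs.getInt(1)} -> ${rs.getString("name")}") // positional getters are 1-based
  }
}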
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcResultSetMetaData.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcResultSetMetaData.scala
deleted file mode 100644
index 5488d9a..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcResultSetMetaData.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.driver
-
-import org.apache.spark.sql.DataFrame
-
-import java.sql.ResultSetMetaData
-
-// scalastyle:off
-class SparkJdbcResultSetMetaData(val data: DataFrame) extends ResultSetMetaData {
- override def getColumnCount: Int = data.columns.length
-
- override def isAutoIncrement(column: Int): Boolean = false
-
- override def isCaseSensitive(column: Int): Boolean = false
-
- override def isSearchable(column: Int): Boolean = true
-
- override def isCurrency(column: Int): Boolean = false
-
- override def isNullable(column: Int): Int = 0
-
- override def isSigned(column: Int): Boolean = false
-
- override def getColumnDisplaySize(column: Int): Int = data.columns(column - 1).length
-
- override def getColumnLabel(column: Int): String = data.columns(column - 1)
-
- override def getColumnName(column: Int): String = data.columns(column - 1)
-
- override def getSchemaName(column: Int): String = data.columns(column - 1)
-
- override def getPrecision(column: Int): Int = 0
-
- override def getScale(column: Int): Int = 0
-
- override def getTableName(column: Int): String = ""
-
- override def getCatalogName(column: Int): String = ""
-
- override def getColumnType(column: Int): Int = 0
-
- override def getColumnTypeName(column: Int): String = data.dtypes(column - 1)._2
-
- override def isReadOnly(column: Int): Boolean = false
-
- override def isWritable(column: Int): Boolean = true
-
- override def isDefinitelyWritable(column: Int): Boolean = true
-
- override def getColumnClassName(column: Int): String = ""
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = ???
-}
-
-// scalastyle:on
\ No newline at end of file
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcStatement.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcStatement.scala
deleted file mode 100644
index 4f3b3de..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/driver/SparkJdbcStatement.scala
+++ /dev/null
@@ -1,111 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.driver
-
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-
-import java.sql.{Connection, ResultSet, SQLWarning, Statement}
-
-// scalastyle:off
-class SparkJdbcStatement extends Statement {
-
- private var resultSet: SparkJdbcResultSet = null
-
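-  // Delegates the SQL text to the shared SparkSession and wraps the resulting DataFrame in a SparkJdbcResultSet.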
- override def executeQuery(sql: String): ResultSet = {
- println(s"[DRIVER] exscuting sql $sql")
- new SparkJdbcResultSet(sparkSession.sql(sql), this)
- }
-
- override def executeUpdate(sql: String): Int = {
- println(s"[DRIVER] exscuting sql $sql")
- sparkSession.sql(sql)
- 0
- }
-
- override def close(): Unit = ()
-
- override def getMaxFieldSize: Int = 0
-
- override def setMaxFieldSize(max: Int): Unit = ()
-
- override def getMaxRows: Int = 0
-
- override def setMaxRows(max: Int): Unit = ()
-
- override def setEscapeProcessing(enable: Boolean): Unit = ()
-
- override def getQueryTimeout: Int = 0
-
- override def setQueryTimeout(seconds: Int): Unit = ()
-
- override def cancel(): Unit = ()
-
- override def getWarnings: SQLWarning = null
-
- override def clearWarnings(): Unit = ()
-
- override def setCursorName(name: String): Unit = ()
-
- override def execute(sql: String): Boolean = {
- this.resultSet = new SparkJdbcResultSet(sparkSession.sql(sql), this)
- true
- }
-
- override def getResultSet: ResultSet = resultSet
-
- override def getUpdateCount: Int = -1
-
- override def getMoreResults: Boolean = !this.resultSet.alreadyTheLast()
-
- override def setFetchDirection(direction: Int): Unit = ()
-
- override def getFetchDirection: Int = 0
-
- override def setFetchSize(rows: Int): Unit = ()
-
- override def getFetchSize: Int = 0
-
- override def getResultSetConcurrency: Int = 0
-
- override def getResultSetType: Int = 0
-
- override def addBatch(sql: String): Unit = ()
-
- override def clearBatch(): Unit = ()
-
- override def executeBatch(): Array[Int] = Array.empty
-
- override def getConnection: Connection = new SparkJdbcConnection()
-
- override def getMoreResults(current: Int): Boolean = !this.resultSet.alreadyTheLast()
-
- override def getGeneratedKeys: ResultSet = null
-
- override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = 0
-
- override def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = 0
-
- override def executeUpdate(sql: String, columnNames: Array[String]): Int = 0
-
- override def execute(sql: String, autoGeneratedKeys: Int): Boolean = false
-
- override def execute(sql: String, columnIndexes: Array[Int]): Boolean = false
-
- override def execute(sql: String, columnNames: Array[String]): Boolean = false
-
- override def getResultSetHoldability: Int = 0
-
- override def isClosed: Boolean = false
-
- override def setPoolable(poolable: Boolean): Unit = ()
-
- override def isPoolable: Boolean = false
-
- override def closeOnCompletion(): Unit = ()
-
- override def isCloseOnCompletion: Boolean = false
-
- override def unwrap[T](iface: Class[T]): T = ???
-
- override def isWrapperFor(iface: Class[_]): Boolean = false
-}
-
-// scalastyle:on
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveConnection.java b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveConnection.java
deleted file mode 100644
index 866230e..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveConnection.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.flyway.hive;
-
-import org.flywaydb.core.internal.database.base.Connection;
-import org.flywaydb.core.internal.database.base.Schema;
-
-import java.sql.SQLException;
-
-public class HiveConnection extends Connection {
- protected HiveConnection(HiveDatabase database, java.sql.Connection connection) {
- super(database, connection);
- }
-
- @Override
- protected String getCurrentSchemaNameOrSearchPath() throws SQLException {
- return jdbcTemplate.queryForString("SELECT current_database()");
- }
-
- @Override
- public Schema getSchema(String name) {
- return new HiveSchema(this.getJdbcTemplate(), this.database, name);
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveDatabase.java b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveDatabase.java
deleted file mode 100644
index 3e8bc89..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveDatabase.java
+++ /dev/null
@@ -1,123 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.flyway.hive;
-
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession;
-import org.flywaydb.core.api.CoreMigrationType;
-import org.flywaydb.core.api.configuration.Configuration;
-import org.flywaydb.core.internal.database.base.Database;
-import org.flywaydb.core.internal.database.base.Table;
-import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
-import org.flywaydb.core.internal.jdbc.StatementInterceptor;
-import org.flywaydb.core.internal.util.AbbreviationUtils;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-
-public class HiveDatabase extends Database {
- public HiveDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) {
- super(configuration, jdbcConnectionFactory, statementInterceptor);
- }
-
- @Override
- protected String doGetCatalog() throws SQLException {
- return "sharp_etl";
- }
-
- @Override
- protected HiveConnection doGetConnection(Connection connection) {
- return new HiveConnection(this, connection);
- }
-
- @Override
- public void ensureSupported() {
-
- }
-
- @Override
- public boolean supportsDdlTransactions() {
- return false;
- }
-
- @Override
- public String getBooleanTrue() {
- return "true";
- }
-
- @Override
- public String getBooleanFalse() {
- return "false";
- }
-
- @Override
- public String doQuote(String identifier) {
- return "`" + identifier + "`";
- }
-
- @Override
- protected String getOpenQuote() {
- return "`";
- }
-
- @Override
- protected String getCloseQuote() {
- return "`";
- }
-
- @Override
- public String getEscapedQuote() {
- return "\\`";
- }
-
- @Override
- public boolean catalogIsSchema() {
- return true;
- }
-
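-    // Creates the backing database on first use, then returns the schema-history DDL followed by the baseline insert.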
- @Override
- public String getRawCreateScript(Table table, boolean baseline) {
- ETLSparkSession.getHiveSparkSession().sql("create database if not exists `sharp_etl`;");
- return "CREATE TABLE " + table + " (\n" +
- " `installed_rank` INT NOT NULL,\n" +
- " `version` STRING,\n" +
- " `description` STRING NOT NULL,\n" +
- " `type` STRING NOT NULL,\n" +
- " `script` STRING NOT NULL,\n" +
- " `checksum` INT,\n" +
- " `installed_by` STRING NOT NULL,\n" +
- " `installed_on` TIMESTAMP NOT NULL,\n" +
- " `execution_time` INT NOT NULL,\n" +
- " `success` BOOLEAN NOT NULL\n" +
- ");\n" + baselineStatement(table) + ";\n";
- }
-
- @Override
- public String getInsertStatement(Table table) {
- // Explicitly set installed_on to CURRENT_TIMESTAMP().
- return "INSERT INTO " + table
- + " (" + quote("installed_rank")
- + ", " + quote("version")
- + ", " + quote("description")
- + ", " + quote("type")
- + ", " + quote("script")
- + ", " + quote("checksum")
- + ", " + quote("installed_by")
- + ", " + quote("installed_on")
- + ", " + quote("execution_time")
- + ", " + quote("success")
- + ")"
- + " VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP(), ?, ?)";
- }
-
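-    // Renders the insert statement with literal values (no bind parameters) so it can be appended to the raw create script.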
- public String baselineStatement(Table table) {
- return String.format(getInsertStatement(table).replace("?", "%s"),
- 1,
- "'0'",
- "'" + AbbreviationUtils.abbreviateDescription(configuration.getBaselineDescription()) + "'",
- "'" + CoreMigrationType.BASELINE + "'",
- "'" + AbbreviationUtils.abbreviateScript(configuration.getBaselineDescription()) + "'",
- "NULL",
- "'" + getInstalledBy() + "'",
- 0,
- getBooleanTrue()
- );
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveDatabaseType.java b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveDatabaseType.java
deleted file mode 100644
index 8b2d390..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveDatabaseType.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.flyway.hive;
-
-import org.flywaydb.core.api.ResourceProvider;
-import org.flywaydb.core.api.configuration.Configuration;
-import org.flywaydb.core.internal.database.base.BaseDatabaseType;
-import org.flywaydb.core.internal.database.base.Database;
-import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
-import org.flywaydb.core.internal.jdbc.StatementInterceptor;
-import org.flywaydb.core.internal.parser.Parser;
-import org.flywaydb.core.internal.parser.ParsingContext;
-
-import java.sql.Connection;
-import java.sql.Types;
-
-public class HiveDatabaseType extends BaseDatabaseType {
- @Override
- public String getName() {
- return "spark_sharp_etl";
- }
-
- @Override
- public int getNullType() {
- return Types.VARCHAR;
- }
-
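-    // Any "jdbc:spark_sharp_etl:" URL is routed to the in-process Spark JDBC shim instead of a real Hive server.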
- @Override
- public boolean handlesJDBCUrl(String url) {
- return url.startsWith("jdbc:spark_sharp_etl:");
- }
-
- @Override
- public String getDriverClass(String url, ClassLoader classLoader) {
- return "com.github.sharpdata.sharpetl.spark.extra.driver.SparkJdbcDriver";
- }
-
- @Override
- public boolean handlesDatabaseProductNameAndVersion(String databaseProductName, String databaseProductVersion, Connection connection) {
- return databaseProductName.startsWith("spark_sharp_etl");
- }
-
- @Override
- public Database createDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) {
- return new HiveDatabase(configuration, jdbcConnectionFactory, statementInterceptor);
- }
-
- @Override
- public Parser createParser(Configuration configuration, ResourceProvider resourceProvider, ParsingContext parsingContext) {
- return new HiveParser(configuration, parsingContext, 8);
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveParser.java b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveParser.java
deleted file mode 100644
index fd33de1..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveParser.java
+++ /dev/null
@@ -1,184 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.flyway.hive;
-
-import org.flywaydb.core.api.configuration.Configuration;
-import org.flywaydb.core.api.resource.Resource;
-import org.flywaydb.core.internal.parser.*;
-import org.flywaydb.core.internal.sqlscript.Delimiter;
-import org.flywaydb.core.internal.sqlscript.ParsedSqlStatement;
-import org.flywaydb.core.internal.sqlscript.SqlStatement;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Set;
-import java.util.regex.Pattern;
-
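-// Every override below delegates to Parser unchanged; the subclass exists so Flyway builds a parser with Hive's peek depth of 8.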
-public class HiveParser extends Parser {
- protected HiveParser(Configuration configuration, ParsingContext parsingContext, int peekDepth) {
- super(configuration, parsingContext, peekDepth);
- }
-
- @Override
- protected Delimiter getDefaultDelimiter() {
- return super.getDefaultDelimiter();
- }
-
- @Override
- protected char getIdentifierQuote() {
- return super.getIdentifierQuote();
- }
-
- @Override
- protected char getAlternativeIdentifierQuote() {
- return super.getAlternativeIdentifierQuote();
- }
-
- @Override
- protected char getAlternativeStringLiteralQuote() {
- return super.getAlternativeStringLiteralQuote();
- }
-
- @Override
- protected char getOpeningIdentifierSymbol() {
- return super.getOpeningIdentifierSymbol();
- }
-
- @Override
- protected char getClosingIdentifierSymbol() {
- return super.getClosingIdentifierSymbol();
- }
-
- @Override
- protected Set getValidKeywords() {
- return super.getValidKeywords();
- }
-
- @Override
- protected boolean supportsPeekingMultipleLines() {
- return super.supportsPeekingMultipleLines();
- }
-
- @Override
- protected SqlStatement getNextStatement(Resource resource, PeekingReader reader, Recorder recorder, PositionTracker tracker, ParserContext context) {
- return super.getNextStatement(resource, reader, recorder, tracker, context);
- }
-
- @Override
- protected boolean shouldAdjustBlockDepth(ParserContext context, List tokens, Token token) {
- return super.shouldAdjustBlockDepth(context, tokens, token);
- }
-
- @Override
- protected boolean shouldDiscard(Token token, boolean nonCommentPartSeen) {
- return super.shouldDiscard(token, nonCommentPartSeen);
- }
-
- @Override
- protected void resetDelimiter(ParserContext context) {
- super.resetDelimiter(context);
- }
-
- @Override
- protected void adjustDelimiter(ParserContext context, StatementType statementType) {
- super.adjustDelimiter(context, statementType);
- }
-
- @Override
- protected int getTransactionalDetectionCutoff() {
- return super.getTransactionalDetectionCutoff();
- }
-
- @Override
- protected void adjustBlockDepth(ParserContext context, List tokens, Token keyword, PeekingReader reader) throws IOException {
- super.adjustBlockDepth(context, tokens, keyword, reader);
- }
-
- @Override
- protected int getLastKeywordIndex(List tokens) {
- return super.getLastKeywordIndex(tokens);
- }
-
- @Override
- protected int getLastKeywordIndex(List tokens, int endIndex) {
- return super.getLastKeywordIndex(tokens, endIndex);
- }
-
- @Override
- protected boolean doTokensMatchPattern(List previousTokens, Token current, Pattern regex) {
- return super.doTokensMatchPattern(previousTokens, current, regex);
- }
-
- @Override
- protected ParsedSqlStatement createStatement(PeekingReader reader, Recorder recorder, int statementPos, int statementLine, int statementCol, int nonCommentPartPos, int nonCommentPartLine, int nonCommentPartCol, StatementType statementType, boolean canExecuteInTransaction, Delimiter delimiter, String sql) throws IOException {
- return super.createStatement(reader, recorder, statementPos, statementLine, statementCol, nonCommentPartPos, nonCommentPartLine, nonCommentPartCol, statementType, canExecuteInTransaction, delimiter, sql);
- }
-
- @Override
- protected Boolean detectCanExecuteInTransaction(String simplifiedStatement, List keywords) {
- return super.detectCanExecuteInTransaction(simplifiedStatement, keywords);
- }
-
- @Override
- protected String readKeyword(PeekingReader reader, Delimiter delimiter, ParserContext context) throws IOException {
- return super.readKeyword(reader, delimiter, context);
- }
-
- @Override
- protected String readIdentifier(PeekingReader reader) throws IOException {
- return super.readIdentifier(reader);
- }
-
- @Override
- protected Token handleDelimiter(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
- return super.handleDelimiter(reader, context, pos, line, col);
- }
-
- @Override
- protected boolean isAlternativeStringLiteral(String peek) {
- return super.isAlternativeStringLiteral(peek);
- }
-
- @Override
- protected boolean isDelimiter(String peek, ParserContext context, int col, int colIgnoringWhitepace) {
- return super.isDelimiter(peek, context, col, colIgnoringWhitepace);
- }
-
- @Override
- protected boolean isLetter(char c, ParserContext context) {
- return super.isLetter(c, context);
- }
-
- @Override
- protected boolean isSingleLineComment(String peek, ParserContext context, int col) {
- return super.isSingleLineComment(peek, context, col);
- }
-
- @Override
- protected boolean isKeyword(String text) {
- return super.isKeyword(text);
- }
-
- @Override
- protected boolean isCommentDirective(String peek) {
- return super.isCommentDirective(peek);
- }
-
- @Override
- protected Token handleCommentDirective(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
- return super.handleCommentDirective(reader, context, pos, line, col);
- }
-
- @Override
- protected Token handleStringLiteral(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
- return super.handleStringLiteral(reader, context, pos, line, col);
- }
-
- @Override
- protected Token handleAlternativeStringLiteral(PeekingReader reader, ParserContext context, int pos, int line, int col) throws IOException {
- return super.handleAlternativeStringLiteral(reader, context, pos, line, col);
- }
-
- @Override
- protected Token handleKeyword(PeekingReader reader, ParserContext context, int pos, int line, int col, String keyword) throws IOException {
- return super.handleKeyword(reader, context, pos, line, col, keyword);
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveSchema.java b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveSchema.java
deleted file mode 100644
index b18846d..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveSchema.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.flyway.hive;
-
-import org.flywaydb.core.internal.database.base.Schema;
-import org.flywaydb.core.internal.database.base.Table;
-import org.flywaydb.core.internal.jdbc.JdbcTemplate;
-
-import java.util.List;
-
-import java.sql.SQLException;
-
-public class HiveSchema extends Schema {
- /**
- * @param jdbcTemplate The Jdbc Template for communicating with the DB.
- * @param database The database-specific support.
- * @param name The name of the schema.
- */
- public HiveSchema(JdbcTemplate jdbcTemplate, HiveDatabase database, String name) {
- super(jdbcTemplate, database, name);
- }
-
- @Override
- protected boolean doExists() throws SQLException {
- return jdbcTemplate.queryForStringList("SHOW SCHEMAS").contains(name);
- }
-
- @Override
- protected boolean doEmpty() throws SQLException {
- return allTables().length == 0;
- }
-
- @Override
- protected void doCreate() throws SQLException {
- jdbcTemplate.execute("CREATE SCHEMA " + database.quote(name));
- }
-
- @Override
- protected void doDrop() throws SQLException {
- clean();
- jdbcTemplate.execute("DROP SCHEMA " + database.quote(name) + " RESTRICT");
- }
-
- @Override
- protected void doClean() throws SQLException {
- for (Table table : allTables())
- table.drop();
- }
-
- @Override
- protected org.flywaydb.core.internal.database.base.Table[] doAllTables() throws SQLException {
- List tableNames = jdbcTemplate.queryForStringList("SHOW TABLES IN " + database.quote(name));
-
- Table[] tables = new Table[tableNames.size()];
- for (int i = 0; i < tableNames.size(); i++) {
- tables[i] = new HiveTable(jdbcTemplate, database, this, tableNames.get(i));
- }
- return tables;
- }
-
- @Override
- public Table getTable(String tableName) {
- return new HiveTable(jdbcTemplate, database, this, tableName);
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveTable.java b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveTable.java
deleted file mode 100644
index e40364d..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/extra/flyway/hive/HiveTable.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extra.flyway.hive;
-
-import org.flywaydb.core.internal.database.base.Table;
-import org.flywaydb.core.internal.jdbc.JdbcTemplate;
-
-import java.sql.SQLException;
-
-public class HiveTable extends Table {
- /**
- * @param jdbcTemplate The JDBC template for communicating with the DB.
- * @param database The database-specific support.
- * @param schema The schema this table lives in.
- * @param name The name of the table.
- */
- public HiveTable(JdbcTemplate jdbcTemplate, HiveDatabase database, HiveSchema schema, String name) {
- super(jdbcTemplate, database, schema, name);
- }
-
- @Override
- protected boolean doExists() throws SQLException {
- return com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.getHiveSparkSession().catalog().tableExists(schema.getName(), name);
- }
-
- @Override
- protected void doLock() throws SQLException {
-
- }
-
- @Override
- protected void doDrop() throws SQLException {
-
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/IO.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/IO.scala
deleted file mode 100644
index da109bb..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/IO.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.job
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
-import com.github.sharpdata.sharpetl.core.datasource.config.DataSourceConfig
-import com.github.sharpdata.sharpetl.core.exception.Exception.EmptyDataException
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants._
-import com.github.sharpdata.sharpetl.datasource.kafka.DFConversations._
-import com.github.sharpdata.sharpetl.core.annotation.AnnotationScanner
-import org.apache.spark.sql.functions.lit
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-object IO {
-
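-  // Looks up the Source implementation registered for the step's data source type and invokes its read(...) via reflection.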
- def read(spark: SparkSession,
- step: WorkflowStep,
- variables: Variables,
- jobLog: JobLog): DataFrame = {
- val dataSourceConfig = step.getSourceConfig[DataSourceConfig]
-
- val value: Class[Source[_, _]] = AnnotationScanner.sourceRegister(dataSourceConfig.dataSourceType)
- assert(value != null)
-
- val df = value.getMethod("read", classOf[WorkflowStep], classOf[JobLog], classOf[SparkSession], classOf[Variables])
- .invoke(value.newInstance(), step, jobLog, spark, variables)
- .asInstanceOf[DataFrame]
-
- addDerivedColumns(dataSourceConfig, df)
- }
-
- def write(df: DataFrame,
- step: WorkflowStep,
- variables: Variables): Unit = {
- val targetConfig = step.getTargetConfig[DataSourceConfig]
- if ((step.throwExceptionIfEmpty == BooleanString.TRUE || step.skipFollowStepWhenEmpty == BooleanString.TRUE)
- && df.isEmpty) {
- throw EmptyDataException(s"Job aborted, because step ${step.step} 's result is empty", step.step)
- }
-
- val value: Class[Sink[_]] = AnnotationScanner.sinkRegister(targetConfig.dataSourceType)
- assert(value != null)
-
- value.getMethod("write", classOf[DataFrame], classOf[WorkflowStep], classOf[Variables])
- .invoke(value.newInstance(), df, step, variables)
- }
-
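-  // derivedColumns is encoded as "name:value;name:value"; each entry is appended to the DataFrame as a literal column.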
- private def addDerivedColumns(dataSourceConfig: DataSourceConfig, df: DataFrame): DataFrame = {
- if (dataSourceConfig.derivedColumns != null) {
- val derivedColumns = dataSourceConfig.derivedColumns
- .split(";")
- .map(_.split(":"))
-
- derivedColumns.foldLeft(df)((df: DataFrame, derivedColumn: Array[String]) =>
- df.withColumn(
- derivedColumn(0), lit(derivedColumn(1))
- )
- )
- } else {
- df
- }
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/SparkWorkflowInterpreter.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/SparkWorkflowInterpreter.scala
deleted file mode 100644
index c883533..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/SparkWorkflowInterpreter.scala
+++ /dev/null
@@ -1,290 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.job
-
-import com.github.sharpdata.sharpetl.core.api.{Variables, WorkflowInterpreter}
-import com.github.sharpdata.sharpetl.core.datasource.config.{DataSourceConfig, FileDataSourceConfig, StreamingDataSourceConfig}
-import com.github.sharpdata.sharpetl.core.exception.Exception.{EmptyDataException, FileDataSourceConfigErrorException, IncrementalDiffModeTooMuchDataException}
-import com.github.sharpdata.sharpetl.datasource.kafka.DFConversations._
-import com.github.sharpdata.sharpetl.spark.utils.{ConvertUtils, ETLSparkSession}
-import com.github.sharpdata.sharpetl.core.quality.QualityCheckRule
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor.jobLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants._
-import com.github.sharpdata.sharpetl.core.util.DateUtil.{BigIntToLocalDateTime, L_YYYY_MM_DD_HH_MM_SS}
-import com.github.sharpdata.sharpetl.core.util.ETLConfig.{incrementalDiffModeDataLimit, partitionColumn}
-import com.github.sharpdata.sharpetl.core.util.StringUtil.{BigIntConverter, isNullOrEmpty}
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, HDFSUtil}
-import ETLSparkSession.sparkSession
-import com.github.sharpdata.sharpetl.core.datasource.connection.FtpConnection
-import com.github.sharpdata.sharpetl.spark.datasource.FtpDataSource.listFileUrl
-import com.github.sharpdata.sharpetl.spark.datasource.{FtpDataSource, HdfsDataSource, ScpDataSource, StreamingDataSource}
-import com.github.sharpdata.sharpetl.spark.quality.SparkQualityCheck
-import org.apache.spark.sql.{DataFrame, SparkSession}
-import org.apache.spark.storage.StorageLevel
-import org.apache.spark.streaming.{Seconds, StreamingContext}
-
-import java.time.LocalDateTime
-import scala.collection.immutable
-import scala.jdk.CollectionConverters._
-import scala.util.control.NonFatal
-
-class SparkWorkflowInterpreter(override val spark: SparkSession,
- override val dataQualityCheckRules: Map[String, QualityCheckRule],
- override val qualityCheckAccessor: QualityCheckAccessor)
- extends SparkQualityCheck(spark, dataQualityCheckRules, qualityCheckAccessor) with WorkflowInterpreter[DataFrame] {
-
-
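-  // Batch steps (everything before the first streaming source) run first; the remaining steps are replayed for every micro-batch.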
- override def evalSteps(steps: List[WorkflowStep], jobLog: JobLog, variables: Variables, start: String, end: String): Unit = {
- val batchStepNum = countBatchStepNum(steps)
- if (batchStepNum > 0) {
- super.evalSteps(steps, jobLog, variables, start, end)
- cleanUpTempTableFromMemory()
- }
- if (batchStepNum < steps.length) {
- executeMicroBatchSteps(steps.slice(batchStepNum, steps.length), jobLog, variables, start, end)
- }
- }
-
- private def cleanUpTempTableFromMemory(): Unit = {
- if (Environment.CURRENT != "test") {
- val tempTableNames = spark.catalog.listTables().filter(_.isTemporary)
- tempTableNames.collect().foreach(it => spark.catalog.dropTempView(it.name))
- }
- }
-
- def executeMicroBatchSteps(microBatchSteps: List[WorkflowStep],
- jobLog: JobLog,
- variables: Variables,
- start: String,
- end: String): Unit = {
- val firstMicroBatchStep = microBatchSteps.head
-
- val streamingDataSourceConfig = firstMicroBatchStep.getSourceConfig.asInstanceOf[StreamingDataSourceConfig]
- val streamingContext = new StreamingContext(
- sparkSession.sparkContext,
- Seconds(streamingDataSourceConfig.getInterval.toInt)
- )
- val stream = StreamingDataSource
- .createDStream(firstMicroBatchStep, jobLog, streamingContext)
-
- StreamingStep.executeStreamingStep(streamingDataSourceConfig, stream, (df: DataFrame) => {
- executeWrite(jobLog, df, firstMicroBatchStep, variables)
- evalSteps(microBatchSteps.tail, jobLog, variables, start, end)
-
- jobLog.setLastUpdateTime(LocalDateTime.now())
- jobLogAccessor.update(jobLog)
- })
- streamingContext.start()
- streamingContext.awaitTermination()
- }
-
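-  // Counts the leading steps whose source is not a streaming source; those run as plain batch steps.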
- def countBatchStepNum(steps: List[WorkflowStep]): Int = {
- var batchStepNum = 0
- while (steps.length > batchStepNum &&
- !steps(batchStepNum).getSourceConfig.isInstanceOf[StreamingDataSourceConfig]) {
- batchStepNum += 1
- }
- batchStepNum
- }
-
- override def listFiles(step: WorkflowStep): List[String] = {
- val conf = step.source.asInstanceOf[FileDataSourceConfig]
-
- val files: List[String] = if (!isNullOrEmpty(conf.filePaths)) {
- conf.filePaths.split(",").toList
- } else {
- conf.dataSourceType match {
- case DataSourceType.FTP =>
- val sourceConfig = step.getSourceConfig[FileDataSourceConfig]
- val ftpConfig = new FtpConnection(sourceConfig.getConfigPrefix)
- if (sourceConfig.getFileDir != null && sourceConfig.getFileDir != "") {
- ftpConfig.dir = sourceConfig.getFileDir
- }
- listFileUrl(
- ftpConfig,
- sourceConfig.getFileNamePattern
- )
- case DataSourceType.HDFS | DataSourceType.JSON |
- DataSourceType.EXCEL | DataSourceType.CSV =>
- HdfsDataSource.listFileUrl(step)
- case DataSourceType.SCP =>
- ScpDataSource.listFilePath(step)
- case _ =>
- throw FileDataSourceConfigErrorException(s"Not supported data source type ${conf.dataSourceType}")
- }
- }
-
- ETLLogger.info(s"Files will to be processed:\n ${files.mkString(",\n")}")
- files
- }
-
- override def deleteSource(step: WorkflowStep): Unit = {
- val sourceConfig = step.getSourceConfig[FileDataSourceConfig]
- val configPrefix = sourceConfig.getConfigPrefix
- if (sourceConfig.getDeleteSource.toBoolean) {
- val sourceFilePath = sourceConfig.getFilePath
- step.getSourceConfig[DataSourceConfig].getDataSourceType.toLowerCase match {
- case DataSourceType.FTP =>
- FtpDataSource.delete(configPrefix, sourceFilePath)
- case DataSourceType.HDFS =>
- HDFSUtil.delete(sourceFilePath)
- case DataSourceType.SCP =>
- ScpDataSource.delete(configPrefix, sourceFilePath)
- case _ =>
- }
- }
- }
-
- override def readFile(step: WorkflowStep,
- jobLog: JobLog,
- variables: Variables,
- files: List[String]): DataFrame = {
- val df = if (files.isEmpty) {
- null // scalastyle:ignore
- } else {
- val dfs = files
- .map(file => {
- step.getSourceConfig[FileDataSourceConfig].setFilePath(file)
- executeRead(step, jobLog, variables)
- }).filter(df => !df.isEmpty)
-
- if (dfs.nonEmpty) {
- dfs.reduce(_ unionByName _)
- } else {
- null // scalastyle:ignore
- }
- }
- if (step.skipFollowStepWhenEmpty == BooleanString.TRUE && (df == null || df.isEmpty)) {
- throw EmptyDataException("Job skipping, because `skipFollowStepWhenEmpty` is true and file is empty!", step.step)
- }
- df
- }
-
- override def executeWrite(jobLog: JobLog, df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
- val stepLog = jobLog.getStepLog(step.step)
- val incrementalType = jobLog.logDrivenType
- ETLLogger.info(s"incremental type is ${incrementalType}")
- if (incrementalType == IncrementalType.DIFF && df.count() > incrementalDiffModeDataLimit.toLong) {
- throw IncrementalDiffModeTooMuchDataException(
- s"Incremental diff mode data limit is $incrementalDiffModeDataLimit, but current data count is ${df.count()}"
- )
- }
- if (incrementalType != IncrementalType.AUTO_INC_ID
- && incrementalType != IncrementalType.KAFKA_OFFSET
- && incrementalType != IncrementalType.UPSTREAM) {
-      // `dataRangeStart` must be a datetime; it supplies the partition column value consumed later in the workflow
- variables.put(s"$${$partitionColumn}", jobLog.dataRangeStart.asBigInt.asLocalDateTime().format(L_YYYY_MM_DD_HH_MM_SS))
- }
- if (df != null) {
- val count = if (step.target.dataSourceType == DataSourceType.VARIABLES) 0 else df.count().toInt
- stepLog.targetCount = count
- ETLLogger.info("[Physical Plan]:")
- df.explain()
- try {
- IO.write(df, step, variables)
- stepLog.successCount = count
- stepLog.failureCount = 0
- } catch {
- case e: Throwable =>
- stepLog.successCount = 0
- stepLog.failureCount = count
- throw e
- }
- }
- }
-
- // scalastyle:off
- override def executeRead(step: WorkflowStep,
- jobLog: JobLog,
- variables: Variables): DataFrame = {
- val stepLog = jobLog.getStepLog(step.step)
- var df = IO.read(spark, step, variables, jobLog)
- if (df != null) {
- if (step.getRepartition != null) {
- df = step.getRepartition match {
- case Pattern.REPARTITION_NUM_PATTERN() =>
- df.repartition(step.getRepartition.toInt)
- case Pattern.REPARTITION_COLUMNS_PATTERN(_) =>
- df.repartition(ConvertUtils.strsToColumns(step.getRepartition.split(",")): _*)
- case Pattern.REPARTITION_NUM_COLUMNS_PATTERN(_) =>
- val repartitionNumAndColumns = step.getRepartition.split(",")
- df.repartition(
- repartitionNumAndColumns.head.toInt,
- ConvertUtils.strsToColumns(
- repartitionNumAndColumns.slice(1, repartitionNumAndColumns.length)
- ): _*
- )
- case _ =>
- val errorMessage = s"Unknown input: ${step.getRepartition}."
- stepLog.error(errorMessage)
- throw new RuntimeException(errorMessage)
- }
- }
- if (step.coalesce != null) {
-        df = df.coalesce(step.coalesce.toInt) // coalesce returns a new DataFrame rather than mutating df; assign it back
- }
- if (step.getPersist != null) {
- df.persist(StorageLevel.fromString(step.getPersist))
- }
- if (step.getCheckPoint != null && step.getCheckPoint.toBoolean) {
-        df = df.localCheckpoint() // localCheckpoint returns a new DataFrame backed by the checkpoint; keep it
- }
- stepLog.sourceCount = if (step.target.dataSourceType == DataSourceType.VARIABLES) 0 else df.count().toInt
- }
- df
- }
- // scalastyle:on
-
-
-  /**
-   * Release resources.
-   */
- override def close(): Unit = {
- try {
- ETLSparkSession.release(spark)
- } catch {
- case NonFatal(e) =>
- ETLLogger.error("Stop Spark session failed", e)
- }
- }
-
- override def applyConf(conf: Map[String, String]): Unit = {
- conf.foreach {
- case (key, value) =>
- ETLLogger.warn(s"Setting spark conf $key=$value")
- spark.conf.set(key, value)
- }
- }
-
- override def applicationId(): String = sparkSession.sparkContext.applicationId
-
- override def executeSqlToVariables(sql: String): List[Map[String, String]] = {
- val data: immutable.Seq[Map[String, String]] =
- sparkSession.sql(sql).toLocalIterator().asScala.toList
- .map(it =>
- it.getValuesMap(it.schema.fieldNames)
- )
- data
- .map(
- it =>
- it.map {
- case (key, value) => ("${" + key + "}", value)
- }
- )
- .toList
- }
-
- override def union(left: DataFrame, right: DataFrame): DataFrame = {
- if (left != null && right != null) {
- left.union(right)
- } else if (left == null && right == null) {
- null // scalastyle:ignore
- } else if (left == null) {
- right
- } else {
- left
- }
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/StreamingStep.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/StreamingStep.scala
deleted file mode 100644
index 66b4259..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/job/StreamingStep.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.job
-
-import com.github.sharpdata.sharpetl.core.datasource.config.{StreamingDataSourceConfig, StreamingKafkaDataSourceConfig}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession.implicits._
-import com.github.sharpdata.sharpetl.datasource.kafka.KafkaConfig.schemaMapping
-import org.apache.spark.sql.{DataFrame, Row}
-import org.apache.spark.sql.functions.from_json
-import org.apache.spark.sql.types.{StringType, StructField, StructType}
-import org.apache.spark.streaming.dstream.DStream
-
-
-object StreamingStep {
-  def executeStreamingStep(streamingDataSourceConfig: StreamingDataSourceConfig,
-                           stream: DStream[Row],
-                           streamingCallback: DataFrame => Unit): Unit = {
- streamingDataSourceConfig match {
- case _ => KafkaStreamingStep.execute(streamingDataSourceConfig, stream, streamingCallback)
- }
- }
-}
-
-object KafkaStreamingStep {
-
-  def execute(streamingDataSourceConfig: StreamingDataSourceConfig,
-              stream: DStream[Row],
-              streamingCallback: DataFrame => Unit): Unit = {
-    // assume the remaining streaming data source is always a [[StreamingKafkaDataSourceConfig]]
- val sourceSchema: StructType = StructType.fromDDL(streamingDataSourceConfig.asInstanceOf[StreamingKafkaDataSourceConfig].getSchemaDDL)
- val schemaMappingExpr = schemaMapping(sourceSchema)
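-    // each micro-batch arrives as raw JSON strings: wrap them in a one-column DataFrame, parse with from_json, then flatten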
- stream.foreachRDD((rdd, _) => {
- val df = sparkSession
- .createDataFrame(
- rdd,
- StructType(Array(StructField("json", StringType)))
- )
- .select(from_json($"json", sourceSchema) as "data")
- .selectExpr(schemaMappingExpr: _*)
- streamingCallback(df)
- })
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/quality/SparkQualityCheck.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/quality/SparkQualityCheck.scala
deleted file mode 100644
index 45bab4e..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/quality/SparkQualityCheck.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.quality
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import com.github.sharpdata.sharpetl.core.quality.QualityCheck._
-import com.github.sharpdata.sharpetl.core.quality.{DataQualityCheckResult, QualityCheck, QualityCheckRule}
-import com.github.sharpdata.sharpetl.core.repository.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.spark.utils.Encoder.dqEncoder
-import org.apache.spark.sql.{DataFrame, SparkSession}
-
-import scala.jdk.CollectionConverters._
-
-@Stable(since = "1.0.0")
-class SparkQualityCheck(val spark: SparkSession,
- override val dataQualityCheckRules: Map[String, QualityCheckRule],
- override val qualityCheckAccessor: QualityCheckAccessor)
- extends QualityCheck[DataFrame] {
-
- override def queryCheckResult(sql: String): Seq[DataQualityCheckResult] = {
- if (sql.trim == "") {
- Seq()
- } else {
- ETLLogger.info(s"execution sql:\n $sql")
- spark.sql(sql).as[DataQualityCheckResult](dqEncoder).collectAsList().asScala
- .map(it => DataQualityCheckResult(it.column, it.dataCheckType, it.ids, it.errorType.split(DELIMITER).head, it.warnCount, it.errorCount))
- .filterNot(it => it.warnCount < 1 && it.errorCount < 1)
- .toSeq
- }
- }
-
- override def execute(sql: String): DataFrame = {
- ETLLogger.info(s"Execution sql: \n $sql")
- spark.sql(sql)
- }
-
- override def createView(df: DataFrame, tempViewName: String): Unit = {
- ETLLogger.info(s"Creating temp view `$tempViewName`")
- df.createOrReplaceTempView(s"`$tempViewName`")
- }
-
- override def dropView(tempViewName: String): Unit = {
- ETLLogger.info(s"Dropping temp view `$tempViewName`")
- spark.catalog.dropTempView(s"`$tempViewName`")
- }
-
- override def dropUnusedCols(df: DataFrame, cols: String): DataFrame = {
- df.drop(cols.split(",").map(_.trim): _*)
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/CheckAllConnectorStatusTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/CheckAllConnectorStatusTransformer.scala
deleted file mode 100644
index 3df77ea..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/CheckAllConnectorStatusTransformer.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.utils.HttpStatusUtils
-import com.github.sharpdata.sharpetl.core.exception.Exception.CheckFailedException
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import CheckConnectorStatusTransformer.checkConnectorRunning
-import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet}
-import org.apache.http.impl.client.{CloseableHttpClient, HttpClients}
-import org.apache.spark.sql.DataFrame
-
-import java.io.{BufferedReader, InputStreamReader}
-import java.util.stream.Collectors
-
-
-// $COVERAGE-OFF$
-object CheckAllConnectorStatusTransformer extends Transformer {
-
- final case class CheckConnectorStatusException(message: String) extends RuntimeException(message)
-
- override def transform(args: Map[String, String]): DataFrame = {
- val truststoreLocation = ETLConfig.getProperty("truststore.location")
- System.setProperty("javax.net.ssl.trustStore", truststoreLocation)
-
- val uri: String = args("uri")
- val connectorNames = getConnectorList(uri)
- var disabledConnectors: Array[String] = Array.empty
- val httpclient: CloseableHttpClient = HttpClients.createDefault
- connectorNames.foreach(connectorName =>
- try {
- checkConnectorRunning(connectorName, uri, httpclient)
- }
- catch {
- case _: RuntimeException => disabledConnectors = disabledConnectors :+ connectorName
- })
- if (disabledConnectors.length > 0) {
- ETLLogger.error("Check Connector Status Failed, Some Connectors May Not Running")
- throw new CheckFailedException(s"Disabled Connector Names: ${disabledConnectors.mkString("{", ", ", "}")}")
- }
- else {
- ETLLogger.info("Check Connector finished, All Connector Running Normally")
- }
-
- sparkSession.emptyDataFrame
- }
-
- def getConnectorList(uri: String): Array[String] = {
- val endpoint = uri + "/connectors"
-
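-    // the /connectors endpoint returns a JSON array of names; it is parsed by stripping quotes and brackets instead of using a JSON parser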
- val httpGet = new HttpGet(endpoint)
- val closeableHttpClient: CloseableHttpClient = HttpClients.createDefault()
- val closeableHttpResponse: CloseableHttpResponse = closeableHttpClient.execute(httpGet)
- try {
- if (!HttpStatusUtils.isSuccessful(closeableHttpResponse.getStatusLine)) {
- throw CheckConnectorStatusException(s"Get connector name list failed with statusCode ${closeableHttpResponse.getStatusLine.getStatusCode}")
- }
- val connectorNameString = new BufferedReader(
- new InputStreamReader(closeableHttpResponse.getEntity.getContent))
- .lines.collect(Collectors.joining(System.lineSeparator))
- .replace("\"", "")
- .replace("[", "")
- .replace("]", "")
- connectorNameString.split(",")
- } finally {
- closeableHttpResponse.close()
- closeableHttpClient.close()
- }
- }
-
-}
-
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/CheckConnectorStatusTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/CheckConnectorStatusTransformer.scala
deleted file mode 100644
index 408b98c..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/CheckConnectorStatusTransformer.scala
+++ /dev/null
@@ -1,131 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
-import com.github.sharpdata.sharpetl.spark.utils.HttpStatusUtils
-import com.github.sharpdata.sharpetl.core.exception.Exception.CheckFailedException
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import org.apache.http.client.methods.HttpGet
-import org.apache.http.impl.client.{CloseableHttpClient, HttpClients}
-import org.apache.http.util.EntityUtils
-import org.apache.spark.sql.DataFrame
-
-import java.nio.charset.StandardCharsets
-
-// $COVERAGE-OFF$
-object CheckConnectorStatusTransformer extends Transformer {
-
- override def transform(args: Map[String, String]): DataFrame = {
- val truststoreLocation = ETLConfig.getProperty("truststore.location")
- System.setProperty("javax.net.ssl.trustStore", truststoreLocation)
-
- val connectorName = args.getOrElse("connectorName", "").split(",")
- val uri = ETLConfig.getKafkaProperties
- val httpclient: CloseableHttpClient = HttpClients.createDefault
- try {
- connectorName.foreach(connector => checkConnectorRunning(connector.trim, uri, httpclient))
- }
- finally {
- if (httpclient != null) httpclient.close()
- }
- sparkSession.emptyDataFrame
- }
-
- def checkConnectorRunning(connectorName: String, uri: String, httpclient: CloseableHttpClient): Unit = {
- val statusResponse = getStatusResponse(httpclient, uri, connectorName)
- if (statusResponse.isRunning) {
- ETLLogger.info(s"$connectorName is running normally.")
- } else {
- ETLLogger.error(statusResponse.toString)
- throw new CheckFailedException(s"Check Status Failed $connectorName may not running")
- }
- }
-
- def getStatusResponse(httpclient: CloseableHttpClient, uri: String, connectorName: String): KafkaStatusResponse = {
- val endpoint = buildEndpoint(uri, "/connectors/" + connectorName + "/status")
- val httpGet = new HttpGet(endpoint)
- val response = httpclient.execute(httpGet)
- val statusCode = response.getStatusLine
- if (HttpStatusUtils.isSuccessful(statusCode)) {
- val responseBody = EntityUtils.toString(response.getEntity, StandardCharsets.UTF_8)
- val objectMapper = new ObjectMapper()
- objectMapper.registerModule(DefaultScalaModule)
- objectMapper.readValue(responseBody, classOf[KafkaStatusResponse])
- } else {
- ETLLogger.error(s"Get connector $connectorName status failed.")
- ETLLogger.error(s"Causes: ${response.getStatusLine.getReasonPhrase}")
- throw new CheckFailedException(s"Check $connectorName Status Failed, caused by " + response.getStatusLine.getReasonPhrase)
- }
- }
-
- def buildEndpoint(uri: String, path: String): String = {
- if (uri.endsWith("/") && path.startsWith("/")) {
- uri.substring(0, uri.length - 1) + path
- } else {
- uri + path
- }
- }
-}
-
-@JsonIgnoreProperties(ignoreUnknown = true)
-case class KafkaStatusResponse(name: String,
- connector: Connector,
- tasks: List[Task]) {
-
- override def toString: String = {
- "{" +
- "name='" + name + '\'' +
- ", connector=" + connector.toString() +
- ", tasks=" + tasks.toString() +
- '}'
- }
-
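-  // Healthy means the connector itself and every one of its tasks report state RUNNING.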
- def isRunning: Boolean = {
- if (tasks.isEmpty) {
- false
- }
- else {
- val taskRunningList = tasks.filter(task => task.state.equals(ConnectorState.RUNNING.toString))
- connector.state.equals(ConnectorState.RUNNING.toString) && taskRunningList.size == tasks.size
- }
- }
-
- def getTaskId: List[Int] = {
- tasks.map(task => task.id)
- }
-}
-
-@JsonIgnoreProperties(ignoreUnknown = true)
-class Connector {
- var state: String = _
- override def toString: String = {
- "{" +
- "state='" + state + '\'' +
- '}'
- }
-}
-
-@JsonIgnoreProperties(ignoreUnknown = true)
-class Task {
-
- var id: Int = _
- var state: String = _
- var trace: String = _
-
- override def toString: String = {
- "{" +
- "id=" + id +
- ", state='" + state + '\'' +
- ", trace='" + trace + '\'' +
- '}'
- }
-}
-
-object ConnectorState extends Enumeration {
- type ConnectorState = Value
- val UNASSIGNED, RUNNING, PAUSED, FAILED = Value
-}
-
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DDLTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DDLTransformer.scala
deleted file mode 100644
index 11462f7..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DDLTransformer.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger, HDFSUtil}
-import com.github.sharpdata.sharpetl.spark.datasource.connection.JdbcConnection
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql.DataFrame
-
-import java.sql.Connection
-import java.sql.DriverManager
-
-// $COVERAGE-OFF$
-object DDLTransformer extends Transformer {
-
- override def transform(args: Map[String, String]): DataFrame = {
- val ddlPath = args.getOrElse("ddlPath", "/user/hive/sharp-etl/ddl")
- val hiveConn = getHiveJDBCConnection(ETLConfig.getProperty("hive.jdbc.url"))
- val pgConn = getPGJDBCConnection(args("dbName"), args("dbType"))
- val ddls = HDFSUtil.recursiveListFiles(ddlPath)
- try {
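-      // route each DDL file by its path: paths containing "hive" run on the Hive connection, "yb"/"agg" on the PG connection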
- ddls.foreach { ddl =>
- HDFSUtil.readLines(ddl).mkString("\n").split(";")
- .filterNot(it => isNullOrEmpty(it.trim))
- .foreach { sql =>
- val filePath = ddl.replace(ddlPath, "")
- if (filePath.contains("hive")) {
- ETLLogger.info(s"create hive table by $ddl")
- try {
- hiveConn.createStatement.execute(s"$sql\n")
- } catch {
- case e: Exception => ETLLogger.error(e.getMessage)
- }
- } else if (filePath.contains("yb") || filePath.contains("agg")) {
- ETLLogger.info(s"create yb table by $ddl")
- try {
- pgConn.createStatement.execute(s"$sql\n")
- } catch {
- case e: Exception => ETLLogger.error(e.getMessage)
- }
- } else {
- ETLLogger.error(s"Unknown ddl: $ddl")
- }
- }
- }
- } finally {
- hiveConn.close()
- pgConn.close()
- }
- ETLSparkSession.sparkSession.emptyDataFrame
- }
-
- private def getHiveJDBCConnection(jdbcUrl: String): Connection = {
- DriverManager.getConnection(jdbcUrl)
- }
-
- private def getPGJDBCConnection(dbName: String, dbType: String): Connection = {
- JdbcConnection(dbName, dbType).getConnection()
- }
-}
-
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DailyJobsSummaryReportTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DailyJobsSummaryReportTransformer.scala
deleted file mode 100644
index e0f818d..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DailyJobsSummaryReportTransformer.scala
+++ /dev/null
@@ -1,131 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.core.notification.NotificationUtil
-import com.github.sharpdata.sharpetl.core.notification.sender.NotificationFactory
-import com.github.sharpdata.sharpetl.core.notification.sender.email.{Email, EmailAttachment, Sender}
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor.jobLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.StepLogAccessor.stepLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.util.Constants.Environment
-import com.github.sharpdata.sharpetl.core.util.DateUtil.{L_YYYY_MM_DD_HH_MM_SS, YYYYMMDDHHMMSS}
-import com.github.sharpdata.sharpetl.core.util.JobLogUtil.JogLogExternal
-import JobLogDFConverter.JobLogList
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.types.{StringType, StructField, StructType}
-import org.apache.spark.sql.{DataFrame, Row}
-
-import java.time.LocalDateTime
-import scala.collection.immutable.ListMap
-import scala.util.{Failure, Success, Try}
-
-object DailyJobsSummaryReportTransformer extends Transformer {
- private lazy val notificationService = new NotificationUtil(jobLogAccessor)
-
- override def transform(args: Map[String, String]): DataFrame = {
- sendAllJobsSummaryReport(
- LocalDateTime.parse(args("dataRangeStart"), L_YYYY_MM_DD_HH_MM_SS),
- LocalDateTime.parse(args("dataRangeEnd"), L_YYYY_MM_DD_HH_MM_SS),
- args.get("datasource").map(_.split(",")).getOrElse(Array.empty))
- ETLSparkSession.sparkSession.emptyDataFrame
- }
-
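-  // Collects job logs in the window, groups them by (project, workflow) ordered by data range, renders a CSV, and emails it as an attachment.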
- def sendAllJobsSummaryReport(startTime: LocalDateTime, endTime: LocalDateTime, datasource: Array[String]): Unit = {
- val jobLogs = jobLogAccessor.executionsBetween(startTime, endTime)
-
- if (jobLogs.nonEmpty) {
- val stepLogs = stepLogAccessor.stepLogsBetween(startTime, endTime).groupBy(_.jobId)
-
- val allJobLogs: Array[JobLog] = ListMap(
- jobLogs
- .groupBy(jobLog => (jobLog.projectName, jobLog.workflowName))
- .toSeq.sortBy(_._1): _*
- )
- .mapValues(_.sortBy(_.dataRangeStart))
- .values.flatten
- .map(jobLog => {
- jobLog.setStepLogs(stepLogs.getOrElse(jobLog.jobId, Array.empty))
- jobLog
- }
- ).toArray
-
- val dataframe = allJobLogs.toDF(datasource)
- val headers = dataframe.schema.map(_.name).mkString(",")
- val content = dataframe.collect().map(_.mkString(",")).mkString("\n")
- val csvText =
- s"""$headers
- |$content""".stripMargin
-
- val startTimeText = startTime.format(YYYYMMDDHHMMSS)
- val endTimeText = endTime.format(YYYYMMDDHHMMSS)
-
- NotificationFactory.sendNotification(
- new Email(
- Sender(notificationService.emailSender, notificationService.emailSenderPersonalName),
- notificationService.summaryJobReceivers,
- s"[${Environment.CURRENT.toUpperCase}] Daily ETL Job summary report($startTimeText to $endTimeText)",
- s"Attachment is report of all jobs between $startTimeText to $endTimeText",
- Option.apply(new EmailAttachment(csvText, "text/csv", s"attachment-$startTimeText-$endTimeText.csv")))
- )
- }
- }
-}
-
-object JobLogDFConverter {
- implicit class JobLogList(jobLogs: Array[JobLog]) {
- def toDF(datasource: Array[String]): DataFrame = {
- if (jobLogs.isEmpty) {
- ETLSparkSession.sparkSession.emptyDataFrame
- } else {
- val rows = jobLogs.map(jobLog => {
- val value = Array(
- jobLog.projectName,
- jobLog.workflowName,
- jobLog.jobId.toString,
- Try(LocalDateTime.parse(jobLog.dataRangeStart, YYYYMMDDHHMMSS)) match {
- case Failure(_) => jobLog.dataRangeStart
- case Success(value) => value.format(L_YYYY_MM_DD_HH_MM_SS)
- },
- Try(LocalDateTime.parse(jobLog.dataRangeEnd, YYYYMMDDHHMMSS)) match {
-            case Failure(_) => jobLog.dataRangeEnd
- case Success(value) => value.format(L_YYYY_MM_DD_HH_MM_SS)
- },
- jobLog.jobStartTime.format(L_YYYY_MM_DD_HH_MM_SS),
- jobLog.status,
- jobLog.duration().toString,
- jobLog.dataFlow()
- )
-
- val errorMessage = jobLog.errorMessage()
- val extendValue = datasource.map(datasource =>
- jobLog.getStepLogs().find(step => step.targetType == datasource).map(_.successCount.toString).getOrElse(""))
- Row.fromSeq(value ++ extendValue ++ Array(jobLog.failStep(), s""" "$errorMessage" """.trim))
- }
- )
-
- val fields = Array(
- StructField("projectName", StringType, true),
- StructField("workflowName", StringType, true),
- StructField("jobId", StringType, true),
- StructField("dataRangeStart", StringType, true),
- StructField("dataRangeEnd", StringType, true),
- StructField("jobStartTime", StringType, true),
- StructField("jobStatus", StringType, true),
- StructField("duration(seconds)", StringType, true),
- StructField("dataFlow", StringType, true)
- ) ++ datasource.map(datasource => StructField(s"to-$datasource", StringType, true)) ++
- Array(
- StructField("failStep", StringType, true),
- StructField("errorMessage", StringType, true))
-
- val value: RDD[Row] = ETLSparkSession.sparkSession.sparkContext.parallelize(rows)
- ETLSparkSession.sparkSession.createDataFrame(
- rowRDD = value,
- schema = StructType(fields))
-
- }
- }
- }
-}
-
-
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DeleteFileTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DeleteFileTransformer.scala
deleted file mode 100644
index 4a613e6..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DeleteFileTransformer.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, HDFSUtil}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql.DataFrame
-
-// $COVERAGE-OFF$
-object DeleteFileTransformer extends Transformer {
-
- override def transform(args: Map[String, String]): DataFrame = {
- val filePath = args("filePath")
- val fileNamePattern = args("fileNamePattern")
- ETLLogger.info(s"delete filePath:${filePath}")
- ETLLogger.info(s"delete fileNamePattern:${fileNamePattern}")
- HDFSUtil.listFileUrl(filePath, fileNamePattern).foreach(file => HDFSUtil.delete(file))
- ETLSparkSession.sparkSession.emptyDataFrame
- }
-}
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DetectChangeTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DetectChangeTransformer.scala
deleted file mode 100644
index b553487..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DetectChangeTransformer.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.utils.{ETLSparkSession, SparkCatalogUtil}
-import org.apache.spark.sql.DataFrame
-
-
-// $COVERAGE-OFF$
-
-/**
- * Automatically detects changed rows.
- */
-object DetectChangeTransformer extends Transformer {
-
- /**
- * Compares against the ADS table in Hive and produces a view of the changed rows.
- * @param args parameters:
- * sourceViewName - view holding the new source data
- * targetHiveTableName - target Hive table name, used to read the column list
- * pkCols - ADS primary key columns, comma separated, e.g. data,copm_code,material_code
- * ignoreCols - columns to exclude from the diff, comma separated, same format as above
- * @return a view of the differing rows; the is_newest column drives the follow-up:
- * is_newest = 0 means the row no longer exists in the new view and should be deleted from ADS
- * is_newest = 1 means the row changed and should be upserted into ADS
- */
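- // Illustrative only: a minimal args map for this transformer; all view/table names
- // below are hypothetical placeholders.
- //
- //   val changed = DetectChangeTransformer.transform(Map(
- //     "sourceViewName" -> "ods_material_view",
- //     "targetHiveTableName" -> "ads_material",
- //     "targetDbName" -> "ads",
- //     "pkCols" -> "data,copm_code,material_code",
- //     "ignoreCols" -> "etl_time"
- //   ))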
- override def transform(args: Map[String, String]): DataFrame = {
- val sourceViewName = args("sourceViewName").toString // 新数据源
- val targetHiveTableName = args("targetHiveTableName").toString // 目标hive table name
- val targetDbName = args("targetDbName").toString // 目标 db name
- val pkCols = args("pkCols").toString.split(",") // pg的pk有哪些字段
- val ignoreCols = args("ignoreCols").toString.split(",") // 不做查重的字段名列表
-
- val spark = ETLSparkSession.getHiveSparkSession()
- // fetch all column names of the target Hive table
- val hiveCols = SparkCatalogUtil.getAllColNames(targetDbName, targetHiveTableName)
-
- // comma-separated, backtick-quoted PK columns
- val pkColsStr = pkCols.map(str => s"`$str`").mkString(",")
- // comparable columns: neither PK nor ignored
- val normalColsStr = hiveCols
- .filter(str => !pkCols.contains(str) && !ignoreCols.contains(str))
- .map(str => s"`$str`").mkString(",")
- // all columns, quoted and comma separated
- val allColStr = hiveCols.map(str => s"`$str`").mkString(",")
-
- val allSql =
- s"""
- |SELECT
- | $allColStr
- | ,0 AS is_newest
- |FROM
- | $targetHiveTableName
- |UNION ALL
- |SELECT
- | $allColStr
- | ,1 AS is_newest
- |FROM
- | $sourceViewName
- |""".stripMargin
-
- val sql =
- s"""
- |WITH
- | all_in_one AS (
- | $allSql
- |)
- |, window_table_1 AS (
- | SELECT *,
- | COUNT(1) OVER (PARTITION BY $pkColsStr ) AS count_num,
- | dense_rank() OVER (
- | partition by $pkColsStr
- | order by
- | $normalColsStr
- | ) as dense_rank_num
- | FROM all_in_one
- |)
- |, window_table_2 as (
- | SELECT *,
- | first_value(dense_rank_num) over (
- | partition by
- | $pkColsStr
- | order by dense_rank_num desc
- | ) as max_dense_rank_num
- | from window_table_1
- |)
- |SELECT *
- |FROM window_table_2
- |WHERE
- | (max_dense_rank_num = 2 and is_newest = 1)
- | OR
- | count_num = 1
- |""".stripMargin
-
- spark.sql(sql)
-
- }
-}
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DropExternalTableTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DropExternalTableTransformer.scala
deleted file mode 100644
index 583dbc0..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/DropExternalTableTransformer.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, HDFSUtil}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql
-
-import scala.jdk.CollectionConverters._
-
-// $COVERAGE-OFF$
-
-case class PartitionField(key: String, value: String)
-
-/*
- Example:
- - databaseName: developer
- - tableNamePrefix: pre_ods_
- - partition: year=2021,month=11,day=1,hour=10
- */
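- /*
-  A sketch of what the example above would produce (assuming a hypothetical table
-  `pre_ods_foo` matching the prefix):
-    ALTER TABLE `developer`.`pre_ods_foo` DROP IF EXISTS PARTITION(year = 2021, month = 11, day = 1, hour = 10)
-  and the matching HDFS partition path:
-    <tableDirectory>/year=2021/month=11/day=1/hour=10
- */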
-
-object DropExternalTableTransformer extends Transformer {
- override def transform(args: Map[String, String]): sql.DataFrame = {
- val tablePathPrefix = args("tablePathPrefix")
- val databaseName = args("databaseName")
- val tableNamePrefix = args("tableNamePrefix")
- val partitionParameter = args("partition")
-
- ETLSparkSession.getHiveSparkSession().sql(s"use ${databaseName}")
- val tables: Array[String] = ETLSparkSession.getHiveSparkSession().sql(s"show tables like '${tableNamePrefix}*'")
- .toLocalIterator()
- .asScala
- .map(row => {
- // `show tables` returns rows of (namespace, tableName, isTemporary); take the
- // table name directly instead of round-tripping through the row's string form
- row.getString(1).substring(tableNamePrefix.length)
- })
- .toArray
-
- val partitionFields = extractPartitionField(partitionParameter)
- val databaseDirectory = s"/warehouse/tablespace/external/hive/${databaseName}.db"
- val tableDirectories = HDFSUtil.listFileUrl(databaseDirectory, tablePathPrefix)
-
- tables.foreach(tableName => {
- val sql = buildDropPartitionSQL(databaseName, tableNamePrefix, tableName, partitionFields)
- ETLSparkSession.getHiveSparkSession().sql(sql)
- ETLLogger.info(s"Hive partition ${partitionFields.mkString("Array(", ", ", ")")} in table ${tableName} has been deleted.")
- })
-
- tableDirectories.foreach(tableDirectory => {
- val partitionDirectory = buildHDFSDirectory(tableDirectory, partitionFields)
- if (HDFSUtil.exists(partitionDirectory)) {
- HDFSUtil.delete(partitionDirectory)
- ETLLogger.info(s"HDFS partition ${partitionFields.mkString("Array(", ", ", ")")} in directory ${tableDirectory} has been deleted.")
- }
- })
-
- ETLSparkSession.sparkSession.emptyDataFrame
- }
-
- def buildDropPartitionSQL(databaseName: String, tableNamePrefix: String, tableName: String, partitionFields: Seq[PartitionField]): String = {
- val partitionArguments = partitionFields.map(partitionField => s"${partitionField.key} = ${partitionField.value}").mkString(", ")
- val sql = s"ALTER TABLE `${databaseName}`.`${tableNamePrefix}${tableName}` DROP IF EXISTS PARTITION(${partitionArguments})"
- sql
- }
-
- def buildHDFSDirectory(tableDirectory: String, partitionFields: Seq[PartitionField]): String = {
-
- val partitionPath = partitionFields.map(partitionField => s"${partitionField.key}=${partitionField.value}").mkString("/")
- val partitionDirectory = s"${tableDirectory}/${partitionPath}"
-
- partitionDirectory
- }
-
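- // e.g. "year=2021,month=11" -> Array(PartitionField("year","2021"), PartitionField("month","11"))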
- def extractPartitionField(argument: String): Array[PartitionField] = argument.split(",")
- .map(field => {
- val keyValue = field.split("=")
- PartitionField(keyValue(0), keyValue(1))
- })
-}
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/EnsureSinkConnectorFinished.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/EnsureSinkConnectorFinished.scala
deleted file mode 100644
index f91c7a3..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/EnsureSinkConnectorFinished.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.core.util.Constants.Environment
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import com.github.sharpdata.sharpetl.datasource.kafka.KafkaConfig.{buildNativeKafkaConsumerConfig, buildNativeKafkaProducerConfig}
-import org.apache.kafka.clients.admin.AdminClient
-import org.apache.kafka.clients.consumer.{KafkaConsumer, OffsetAndMetadata}
-import org.apache.kafka.common.TopicPartition
-import org.apache.spark.sql.DataFrame
-
-import java.lang
-import scala.annotation.tailrec
-import scala.collection.mutable
-import scala.jdk.CollectionConverters._
-
-// $COVERAGE-OFF$
-object EnsureSinkConnectorFinished extends Transformer {
-
- final case class WaitTimeoutException(msg: String) extends RuntimeException(msg)
-
- /**
- * ensure all data in the topic has been sunk to HDFS by checking the consumer-group lag
- *
- * command: `kafka-consumer-groups --group consumer-group --describe --bootstrap-server localhost:9092 --command-config /opt/kafka_cfg/client.properties`
- */
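- //
- // Illustrative only: a minimal invocation, with a hypothetical consumer group and topic:
- //
- //   EnsureSinkConnectorFinished.transform(Map(
- //     "group" -> "hdfs-sink-group",
- //     "kafkaTopic" -> "events",
- //     "propertyPrefix" -> "kafka.consumer"
- //   ))
- //
- // transform() re-checks the group lag every 30 seconds and fails with
- // WaitTimeoutException after more than 10 retries if the lag never reaches 0.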
- override def transform(args: Map[String, String]): DataFrame = {
- val consumerGroup = args("group")
- val kafkaTopic = args.getOrElse("kafkaTopic", "")
- val propertyPrefix = args.getOrElse("propertyPrefix", "kafka.producer")
- val consumerConfig = propertyPrefix match {
- case "kafka.consumer" => buildNativeKafkaConsumerConfig(consumerGroup).asJava
- case "kafka.producer" => buildNativeKafkaProducerConfig(consumerGroup).asJava
- }
- val kafkaClient = new KafkaConsumer(consumerConfig)
- val adminClient = AdminClient.create(consumerConfig)
-
- @tailrec def waitUntilNoLag(loopTimes: Int = 0): Unit = {
- val lag = getLag(consumerGroup, kafkaClient, adminClient, kafkaTopic)
- if (lag == 0) {
- ETLLogger.info(s"Current lag is $lag, check passed")
- } else if (loopTimes > 10) {
- throw WaitTimeoutException(s"Timeout waiting for 0 lag, current lag is $lag")
- } else {
- ETLLogger.warn(s"Current lag is $lag, wait for 30 seconds...")
- Thread.sleep(30 * 1000)
- waitUntilNoLag(loopTimes + 1)
- }
- }
-
- try {
- waitUntilNoLag()
- } finally {
- kafkaClient.close()
- adminClient.close()
- }
-
- sparkSession.emptyDataFrame
- }
-
- private def getLag(consumerGroup: String, kafkaClient: KafkaConsumer[Nothing, Nothing], adminClient: AdminClient, kafkaTopic: String) = {
- val consumedOffsets = adminClient.listConsumerGroupOffsets(consumerGroup).partitionsToOffsetAndMetadata().get()
- val endOffsets = kafkaClient.endOffsets(consumedOffsets.keySet)
-
- val consumedOffsetWithTopic: mutable.Map[TopicPartition, OffsetAndMetadata] = kafkaTopic match {
- case "" => consumedOffsets.asScala
- case _ => consumedOffsets.asScala.filter(it => it._1.topic == kafkaTopic)
- }
-
- if (Environment.CURRENT == Environment.PROD) {
- assert(consumedOffsetWithTopic.nonEmpty)
- }
-
- val endOffsetsWithTopic: mutable.Map[TopicPartition, lang.Long] = kafkaTopic match {
- case "" => endOffsets.asScala
- case _ => endOffsets.asScala.filter(it => it._1.topic == kafkaTopic)
- }
-
- val endOffset = endOffsetsWithTopic.values.map(_.toLong).sum
- val consumedOffset = consumedOffsetWithTopic.values.map(_.offset()).sum
-
- if (Environment.CURRENT == Environment.PROD) {
- assert(endOffset > 0, "The end offset cannot be 0, which would mean there is no data in the topic")
- assert(consumedOffset > 0, "The consumed offset cannot be 0, which would mean the consumer has never consumed any data from the topic")
- }
- ETLLogger.info(s"End offset $endOffset, consumed offset $consumedOffset")
- endOffset - consumedOffset
- }
-}
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcAutoCreateDimTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcAutoCreateDimTransformer.scala
deleted file mode 100644
index fdc88a7..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcAutoCreateDimTransformer.scala
+++ /dev/null
@@ -1,158 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.modeling.excel.model.CreateDimMode
-import com.google.gson.{Gson, JsonElement, JsonObject}
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.POSTGRES
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.StringUtil.uuidName
-import com.github.sharpdata.sharpetl.modeling.sql.dialect.SqlDialect.quote
-import com.github.sharpdata.sharpetl.spark.datasource.HiveDataSource
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import com.github.sharpdata.sharpetl.spark.datasource.connection.JdbcConnection
-import org.apache.spark.sql._
-import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-
-import java.sql.Connection
-import java.util.Map.Entry
-import java.util.Properties
-import scala.jdk.CollectionConverters._
-
-// scalastyle:off
-object JdbcAutoCreateDimTransformer extends Transformer {
- private def genStrListFromJson(jsonObj: JsonObject, separation: String): Seq[String] = {
- jsonObj.entrySet.asScala.map {
- entry: Entry[String, JsonElement] =>
- s"${entry.getKey}$separation${entry.getValue.getAsString}"
- }.toList
- }
-
- private def genKeyListFromJson(jsonObj: JsonObject): Seq[String] = {
- jsonObj.entrySet().asScala.map(_.getKey).toSet.toList.sorted
- }
-
- private def genValueListFromJson(jsonObj: JsonObject): Seq[String] = {
- jsonObj.entrySet.asScala.map {
- entry: Entry[String, JsonElement] =>
- entry.getValue.getAsString
- }.toList
- }
-
- private def loadTempDataToDb(conn: Connection,
- dimTableColumnsAndTypeJsonObj: JsonObject,
- currentAndDimColumnsMappingJsonObj: JsonObject,
- currentAndDimPrimaryMappingJsonObj: JsonObject,
- currentDbType: String,
- currentDb: String,
- updateTable: String,
- businessCreateTime: String): String = {
- val conf = JdbcConnection(currentDb, currentDbType).getDefaultConfig
- val url = conf(JDBCOptions.JDBC_URL)
- val prop = new Properties()
- prop.setProperty(JDBCOptions.JDBC_DRIVER_CLASS, conf(JDBCOptions.JDBC_DRIVER_CLASS))
- prop.setProperty("user", conf("user"))
- prop.setProperty("password", conf("password"))
-
- val columns: Seq[String] = genStrListFromJson(dimTableColumnsAndTypeJsonObj, " ")
-
- val columnsStr = columns.toList.mkString(s", $ENTER")
- val tempTable = "temp_table_" + uuidName()
- val createSql =
- s"""
- |DROP TABLE IF EXISTS $tempTable;
- |CREATE TABLE ${tempTable}(
- |$columnsStr
- |);""".stripMargin
-
- conn.prepareStatement(createSql).execute()
-
- val selectColumns: Seq[String] = genStrListFromJson(currentAndDimColumnsMappingJsonObj, " as ")
-
- val dimColumns: Seq[String] = genValueListFromJson(currentAndDimColumnsMappingJsonObj)
-
- val currentPrimaryColumn: Seq[String] = genKeyListFromJson(currentAndDimPrimaryMappingJsonObj)
-
- val selectSql =
- s"""
- |select ${dimColumns.mkString(", ")}
- |from
- |(select ${selectColumns.mkString(", ")}, row_number() OVER (PARTITION BY ${currentPrimaryColumn.mkString(", ")} ORDER BY `$businessCreateTime` DESC) as row_number
- |from $updateTable) temp
- |where temp.row_number=1
- |""".stripMargin
-
- val df = new HiveDataSource().load(sparkSession, selectSql)
- df.write.mode(SaveMode.Append).jdbc(url, tempTable, prop)
- tempTable
- }
-
- private def loadInsertDataToDim(conn: Connection,
- currentTableName: String,
- primaryFields: Seq[String],
- tempTable: String,
- currentTableColumnsList: Seq[String],
- businessCreateTime: String): Unit = {
- val insertSql =
- s"""
- |with latest_current as (
- | select * from $currentTableName
- | where is_latest='1'
- |), insert_data as (
- | select ${currentTableColumnsList.map(field => "tmp." + quote(field, POSTGRES)).mkString(", ")}
- | from $tempTable tmp
- | left join latest_current
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | where latest_current.${quote(primaryFields.head, POSTGRES)} is null
- |)
- |insert into $currentTableName (${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, start_time, is_active, is_latest, is_auto_created)
- |select ${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessCreateTime, '1', '1', '1' from insert_data;""".stripMargin
- conn.prepareStatement(insertSql).execute()
- }
-
- private def clearResource(tempTable: String, conn: Connection): Unit = {
- val dropSql =
- s"""
- |DROP TABLE IF EXISTS $tempTable;""".stripMargin
- conn.prepareStatement(dropSql).execute()
- }
-
- def loadData(args: collection.Map[String, String]): Unit = {
- val updateTable = args("updateTable")
- val createDimMode = args("createDimMode")
-
- val dimDb = args("dimDb")
- val dimDbType = args("dimDbType")
- val dimTable = args("dimTable")
-
- val currentBusinessCreateTime = args("currentBusinessCreateTime")
- val dimTableColumnsAndType = args("dimTableColumnsAndType")
- val currentAndDimColumnsMapping = args("currentAndDimColumnsMapping")
- val currentAndDimPrimaryMapping = args("currentAndDimPrimaryMapping")
- val conn = JdbcConnection(dimDb, dimDbType).getConnection()
-
- val dimTableColumnsAndTypeJsonObj: JsonObject = new Gson().fromJson(dimTableColumnsAndType, classOf[JsonObject])
- val currentAndDimColumnsMappingJsonObj: JsonObject = new Gson().fromJson(currentAndDimColumnsMapping, classOf[JsonObject])
- val currentAndDimPrimaryMappingJsonObj: JsonObject = new Gson().fromJson(currentAndDimPrimaryMapping, classOf[JsonObject])
-
- if (createDimMode.equals(CreateDimMode.ONCE)) {
- val tempTable = loadTempDataToDb(conn, dimTableColumnsAndTypeJsonObj, currentAndDimColumnsMappingJsonObj,
- currentAndDimPrimaryMappingJsonObj, dimDbType, dimDb, updateTable, currentBusinessCreateTime)
-
- val dimPrimaryFields: Seq[String] = genValueListFromJson(currentAndDimPrimaryMappingJsonObj)
-
- val dimTableColumnsList: Seq[String] = genValueListFromJson(currentAndDimColumnsMappingJsonObj)
-
- loadInsertDataToDim(conn, dimTable, dimPrimaryFields, tempTable, dimTableColumnsList, currentAndDimColumnsMappingJsonObj.getAsJsonPrimitive(currentBusinessCreateTime).getAsString)
- clearResource(tempTable, conn)
- conn.close()
- } else if (createDimMode.equals(CreateDimMode.ALWAYS)) {
- // ALWAYS mode: intentionally a no-op here (assumed to be handled on every regular load)
- conn.close()
- } else {
- // any other mode: nothing to do
- conn.close()
- }
- }
-
- override def transform(args: Map[String, String]): DataFrame = {
- loadData(args)
- sparkSession.emptyDataFrame
- }
-}
-// scalastyle:on
\ No newline at end of file
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcLoadTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcLoadTransformer.scala
deleted file mode 100644
index 502ae90..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcLoadTransformer.scala
+++ /dev/null
@@ -1,400 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.google.gson.{Gson, JsonElement, JsonObject}
-import com.github.sharpdata.sharpetl.core.util.Constants.LoadType.INCREMENTAL
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.POSTGRES
-import com.github.sharpdata.sharpetl.core.util.Constants.Separator.ENTER
-import com.github.sharpdata.sharpetl.core.util.DateUtil.YYYY_MM_DD_HH_MM_SS
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.core.util.StringUtil.uuidName
-import com.github.sharpdata.sharpetl.modeling.sql.dialect.SqlDialect.quote
-import com.github.sharpdata.sharpetl.spark.datasource.HiveDataSource
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import com.github.sharpdata.sharpetl.spark.datasource.connection.JdbcConnection
-import org.apache.spark.sql._
-import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-
-import java.sql.Connection
-import java.util.Map.Entry
-import java.util.{Date, Properties}
-import scala.jdk.CollectionConverters._
-
-// scalastyle:off
-
-object JdbcLoadTransformer extends Transformer {
- private def loadTempDataToDb(conn: Connection,
- currentTableColumnsAndType: JsonObject,
- currentDbType: String,
- currentDb: String,
- updateTable: String): String = {
- val conf = JdbcConnection(currentDb, currentDbType).getDefaultConfig
- val url = conf(JDBCOptions.JDBC_URL)
- val prop = new Properties()
- prop.setProperty(JDBCOptions.JDBC_DRIVER_CLASS, conf(JDBCOptions.JDBC_DRIVER_CLASS))
- prop.setProperty("user", conf("user"))
- prop.setProperty("password", conf("password"))
-
- val columns: Seq[String] =
- currentTableColumnsAndType.entrySet.asScala.map {
- entry: Entry[String, JsonElement] =>
- s"${quote(entry.getKey, currentDbType)} ${entry.getValue.getAsString}"
- }.toList
-
- val columnsStr = columns.toList.mkString(s", $ENTER")
- val tempTable = "temp_table_" + uuidName()
- val createSql =
- s"""
- |DROP TABLE IF EXISTS $tempTable;
- |CREATE TABLE ${tempTable}(
- |$columnsStr
- |);""".stripMargin
-
- ETLLogger.info(s"Executing SQL: \n $createSql")
-
- conn.prepareStatement(createSql).execute()
-
- val selectSql =
- s"""
- |select * from $updateTable
- |""".stripMargin
-
- val df = new HiveDataSource().load(sparkSession, selectSql)
- df.write.mode(SaveMode.Append).jdbc(url, tempTable, prop)
- tempTable
- }
-
- private def loadDeleteTempDataToDb(conn: Connection,
- currentTableName: String,
- primaryFields: Seq[String],
- tempTable: String,
- currentTableColumnsList: Seq[String],
- businessUpdateTime: String): String = {
- val nowDateTime = YYYY_MM_DD_HH_MM_SS.format(new Date())
- val tempDeleteTable = "temp_delete_table_" + uuidName()
- val createDeleteTempSql =
- s"""
- |create temp table $tempDeleteTable as
- |with latest_current as (
- | select * from $currentTableName
- | where is_latest='1'
- |), delete_data as (
- | select ${currentTableColumnsList.filterNot(_.equals(businessUpdateTime)).map(field => "latest_current." + quote(field, POSTGRES)).mkString(", ")}, to_timestamp('$nowDateTime', 'yyyy-MM-dd HH24:mi:ss') as $businessUpdateTime, '0' as is_active
- | from latest_current
- | left join $tempTable tmp
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | where tmp.${quote(primaryFields.head, POSTGRES)} is null
- |), delete_data_for_load as (
- | select ${currentTableColumnsList.map(field => "delete_data." + quote(field, POSTGRES)).mkString(", ")}, delete_data.is_active
- | from delete_data
- | inner join latest_current
- | on ${primaryFields.map(field => "delete_data." + quote(field, POSTGRES) + " = latest_current." + quote(field, POSTGRES)).mkString(" and ")} and latest_current.is_active != delete_data.is_active
- |)
- |select * from delete_data_for_load;""".stripMargin
- conn.prepareStatement(createDeleteTempSql).execute()
- tempDeleteTable
- }
-
- private def loadUpdateTempDataToDb(conn: Connection,
- currentTableName: String,
- primaryFields: Seq[String],
- tempTable: String,
- currentTableColumnsList: Seq[String],
- businessUpdateTime: String): String = {
- val tempUpdateTable = "temp_update_table_" + uuidName()
- val createUpdateTempSql =
- s"""
- |create temp table $tempUpdateTable as
- |with latest_current as (
- | select * from $currentTableName
- | where is_latest='1'
- |), update_data as (
- | select ${currentTableColumnsList.map(field => "tmp." + quote(field, POSTGRES)).mkString(", ")}, '1' as is_active
- | from latest_current
- | inner join $tempTable tmp
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | and (${currentTableColumnsList.diff(primaryFields).filterNot(_.equals(businessUpdateTime)).map(field => "latest_current." + quote(field, POSTGRES) + " != tmp." + quote(field, POSTGRES)).mkString(" or ")})
- | and latest_current.${quote(businessUpdateTime, POSTGRES)} <= tmp.${quote(businessUpdateTime, POSTGRES)}
- |)
- |select * from update_data;""".stripMargin
- conn.prepareStatement(createUpdateTempSql).execute()
- tempUpdateTable
- }
-
- private def loadInsertTempDataToDb(conn: Connection,
- currentTableName: String,
- primaryFields: Seq[String],
- tempTable: String,
- currentTableColumnsList: Seq[String]): String = {
- val tempInsertTable = "temp_insert_table_" + uuidName()
- val createInsertTempSql =
- s"""
- |create temp table $tempInsertTable as
- |with latest_current as (
- | select * from $currentTableName
- | where is_latest='1'
- |), insert_data as (
- | select ${currentTableColumnsList.map(field => "tmp." + quote(field, POSTGRES)).mkString(", ")}, '1' as is_active
- | from $tempTable tmp
- | left join latest_current
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | where latest_current.${quote(primaryFields.head, POSTGRES)} is null
- |)
- |select * from insert_data;""".stripMargin
- conn.prepareStatement(createInsertTempSql).execute()
- tempInsertTable
- }
-
- private def execSCFull(conn: Connection,
- currentTableName: String,
- primaryFields: Seq[String],
- tempTable: String,
- currentTableColumnsList: Seq[String],
- businessCreateTime: String,
- businessUpdateTime: String): Unit = {
- val tempDeleteTable = loadDeleteTempDataToDb(conn, currentTableName, primaryFields, tempTable, currentTableColumnsList, businessUpdateTime)
- val tempUpdateTable = loadUpdateTempDataToDb(conn, currentTableName, primaryFields, tempTable, currentTableColumnsList, businessUpdateTime)
- val tempInsertTable = loadInsertTempDataToDb(conn, currentTableName, primaryFields, tempTable, currentTableColumnsList)
-
- val updateSql =
- s"""
- |with update_data_for_load as (
- | select ${primaryFields.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessUpdateTime
- | from $tempDeleteTable
- | union all
- | select ${primaryFields.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessUpdateTime
- | from $tempUpdateTable
- |)
- |update $currentTableName
- |set end_time=update_data_for_load.$businessUpdateTime, is_latest='0'
- |from update_data_for_load
- |where ${primaryFields.map(field => currentTableName + "." + quote(field, POSTGRES) + " = update_data_for_load." + quote(field, POSTGRES)).mkString(" and ")} and $currentTableName.is_latest='1'""".stripMargin
-
- val insertSql =
- s"""
- |with insert_data_for_load as (
- | select ${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessUpdateTime as start_time, is_active, '1' as is_latest from $tempDeleteTable
- | union all
- | select ${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessUpdateTime as start_time, is_active, '1' as is_latest from $tempUpdateTable
- | union all
- | select ${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessCreateTime as start_time, is_active, '1' as is_latest from $tempInsertTable
- |)
- |insert into $currentTableName (${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, start_time, is_active, is_latest)
- |select ${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, start_time, is_active, is_latest from insert_data_for_load;""".stripMargin
- conn.prepareStatement(updateSql).execute()
- conn.prepareStatement(insertSql).execute()
- }
-
- private def execSCIncremental(conn: Connection,
- currentTableName: String,
- primaryFields: Seq[String],
- tempTable: String,
- currentTableColumnsList: Seq[String],
- businessCreateTime: String,
- businessUpdateTime: String): Unit = {
-
- val tempUpdateTable = loadUpdateTempDataToDb(conn, currentTableName, primaryFields, tempTable, currentTableColumnsList, businessUpdateTime)
- val tempInsertTable = loadInsertTempDataToDb(conn, currentTableName, primaryFields, tempTable, currentTableColumnsList)
-
- val updateSql =
- s"""
- |with update_data_for_load as (
- | select ${primaryFields.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessUpdateTime
- | from $tempUpdateTable
- |)
- |update $currentTableName
- |set end_time=update_data_for_load.$businessUpdateTime, is_latest='0'
- |from update_data_for_load
- |where ${primaryFields.map(field => currentTableName + "." + quote(field, POSTGRES) + " = update_data_for_load." + quote(field, POSTGRES)).mkString(" and ")} and $currentTableName.is_latest='1'""".stripMargin
-
- val insertSql =
- s"""
- |with insert_data_for_load as (
- | select ${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessUpdateTime as start_time, is_active, '1' as is_latest from $tempUpdateTable
- | union all
- | select ${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, $businessCreateTime as start_time, is_active, '1' as is_latest from $tempInsertTable
- |)
- |insert into $currentTableName (${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, start_time, is_active, is_latest)
- |select ${currentTableColumnsList.map(field => quote(field, POSTGRES)).mkString(", ")}, start_time, is_active, is_latest from insert_data_for_load;""".stripMargin
- conn.prepareStatement(updateSql).execute()
- conn.prepareStatement(insertSql).execute()
- }
-
- private def execNoSCFull(conn: Connection,
- currentTableName: String,
- primaryFields: Seq[String],
- tempTable: String,
- currentTableColumnsList: Seq[String],
- businessUpdateTime: String): Unit = {
- val deleteSql =
- s"""
- |with latest_current as (
- | select *
- | from $currentTableName
- |), delete_data as (
- | select ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES)).mkString(", ")}
- | from latest_current
- | left join ${quote(tempTable, POSTGRES)} tmp
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | where tmp.${quote(primaryFields.head, POSTGRES)} is null
- |), update_data as (
- | select ${primaryFields.map(field => "tmp." + quote(field, POSTGRES)).mkString(", ")}
- | from latest_current
- |-- the OR comparison below must exclude the update_time column
- | inner join ${quote(tempTable, POSTGRES)} tmp
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | and (${currentTableColumnsList.diff(primaryFields).filterNot(_.equals(businessUpdateTime)).map(field => "latest_current." + quote(field, POSTGRES) + " != tmp." + quote(field, POSTGRES)).mkString(" or ")})
- | and latest_current.${quote(businessUpdateTime, POSTGRES)} <= tmp.${quote(businessUpdateTime, POSTGRES)}
- |), delete_data_for_load as (
- | select ${primaryFields.map(quote(_, POSTGRES)).mkString(", ")} from delete_data
- | union all
- | select ${primaryFields.map(quote(_, POSTGRES)).mkString(", ")} from update_data
- |)
- |delete from $currentTableName
- |using delete_data_for_load
- |where ${primaryFields.map(field => currentTableName + "." + quote(field, POSTGRES) + " = delete_data_for_load." + quote(field, POSTGRES)).mkString(" and ")};""".stripMargin
-
- val insertSql =
- s"""
- |with latest_current as (
- | select *
- | from $currentTableName
- |), insert_data as (
- | select ${currentTableColumnsList.map("tmp." + quote(_, POSTGRES)).mkString(", ")}
- | from ${quote(tempTable, POSTGRES)} tmp
- | left join latest_current
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | where latest_current.${quote(primaryFields.head, POSTGRES)} is null
- |), update_data as (
- | select ${currentTableColumnsList.map("tmp." + quote(_, POSTGRES)).mkString(", ")}
- | from latest_current
- |-- the OR comparison below must exclude the update_time column
- | inner join ${quote(tempTable, POSTGRES)} tmp
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | and (${currentTableColumnsList.diff(primaryFields).filterNot(_.equals(businessUpdateTime)).map(field => "latest_current." + quote(field, POSTGRES) + " != tmp." + quote(field, POSTGRES)).mkString(" or ")})
- | and latest_current.${quote(businessUpdateTime, POSTGRES)} <= tmp.${quote(businessUpdateTime, POSTGRES)}
- |), insert_data_for_load as (
- | select ${currentTableColumnsList.map(quote(_, POSTGRES)).mkString(", ")} from update_data
- | union all
- | select ${currentTableColumnsList.map(quote(_, POSTGRES)).mkString(", ")} from insert_data
- |)
- |insert into $currentTableName (${currentTableColumnsList.map(quote(_, POSTGRES)).mkString(", ")})
- |select ${currentTableColumnsList.map(quote(_, POSTGRES)).mkString(", ")} from insert_data_for_load;""".stripMargin
- conn.prepareStatement(deleteSql).execute()
- conn.prepareStatement(insertSql).execute()
- }
-
- private def execNoSCIncremental(conn: Connection,
- currentTableName: String,
- primaryFields: Seq[String],
- tempTable: String,
- currentTableColumnsList: Seq[String],
- businessUpdateTime: String): Unit = {
- val deleteSql =
- s"""
- |with latest_current as (
- | select *
- | from $currentTableName
- |), update_data as (
- | select ${primaryFields.map(field => "tmp." + quote(field, POSTGRES)).mkString(", ")}
- | from latest_current
- |-- the OR comparison below must exclude the update_time column
- | inner join ${quote(tempTable, POSTGRES)} tmp
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | and (${currentTableColumnsList.diff(primaryFields).filterNot(_.equals(businessUpdateTime)).map(field => "latest_current." + quote(field, POSTGRES) + " != tmp." + quote(field, POSTGRES)).mkString(" or ")})
- | and latest_current.${quote(businessUpdateTime, POSTGRES)} <= tmp.${quote(businessUpdateTime, POSTGRES)}
- |), delete_data_for_load as (
- | select ${primaryFields.map(quote(_, POSTGRES)).mkString(", ")} from update_data
- |)
- |delete from $currentTableName
- |using delete_data_for_load
- |where ${primaryFields.map(field => currentTableName + "." + quote(field, POSTGRES) + " = delete_data_for_load." + quote(field, POSTGRES)).mkString(" and ")};""".stripMargin
-
- val insertSql =
- s"""
- |with latest_current as (
- | select *
- | from $currentTableName
- |), insert_data as (
- | select ${currentTableColumnsList.map("tmp." + quote(_, POSTGRES)).mkString(", ")}
- | from ${quote(tempTable, POSTGRES)} tmp
- | left join latest_current
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | where latest_current.${quote(primaryFields.head, POSTGRES)} is null
- |), update_data as (
- | select ${currentTableColumnsList.map("tmp." + quote(_, POSTGRES)).mkString(", ")}
- | from latest_current
- |-- the OR comparison below must exclude the update_time column
- | inner join ${quote(tempTable, POSTGRES)} tmp
- | on ${primaryFields.map(field => "latest_current." + quote(field, POSTGRES) + " = tmp." + quote(field, POSTGRES)).mkString(" and ")}
- | and (${currentTableColumnsList.diff(primaryFields).filterNot(_.equals(businessUpdateTime)).map(field => "latest_current." + quote(field, POSTGRES) + " != tmp." + quote(field, POSTGRES)).mkString(" or ")})
- | and latest_current.${quote(businessUpdateTime, POSTGRES)} <= tmp.${quote(businessUpdateTime, POSTGRES)}
- |), insert_data_for_load as (
- | select ${currentTableColumnsList.map(quote(_, POSTGRES)).mkString(", ")} from update_data
- | union all
- | select ${currentTableColumnsList.map(quote(_, POSTGRES)).mkString(", ")} from insert_data
- |)
- |insert into $currentTableName (${currentTableColumnsList.map(quote(_, POSTGRES)).mkString(", ")})
- |select ${currentTableColumnsList.map(quote(_, POSTGRES)).mkString(", ")} from insert_data_for_load;""".stripMargin
- conn.prepareStatement(deleteSql).execute()
- conn.prepareStatement(insertSql).execute()
- }
-
- private def clearResource(tempTable: String, conn: Connection): Unit = {
- val dropSql =
- s"""
- |DROP TABLE IF EXISTS $tempTable;""".stripMargin
- conn.prepareStatement(dropSql).execute()
- }
-
- def loadData(args: collection.Map[String, String]): Unit = {
- val slowChanging = args("slowChanging").toBoolean
- val updateType = args("updateType")
-
- val updateTable = args("updateTable")
-
- val currentDb = args("currentDb")
- val currentDbType = args("currentDbType")
- val currentTable = args("currentTable")
-
- val primaryFields = args("primaryFields").split(",").toList
- val businessCreateTime = args("businessCreateTime")
- val businessUpdateTime = args("businessUpdateTime")
- val currentTableColumnsAndType = args("currentTableColumnsAndType")
-
- val conn = JdbcConnection(currentDb, currentDbType).getConnection()
-
- val currentTableColumnsAndTypeJsonObj: JsonObject = new Gson().fromJson(currentTableColumnsAndType, classOf[JsonObject])
- val tempTable = loadTempDataToDb(conn, currentTableColumnsAndTypeJsonObj, currentDbType, currentDb, updateTable)
-
- val currentTableColumnsList = currentTableColumnsAndTypeJsonObj.entrySet().asScala.map(_.getKey).toSet.toList.sorted
-
- val currentTableName = s"${quote(currentDb, POSTGRES)}.${quote(currentTable, POSTGRES)}"
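- // Dispatch: slowChanging selects SCD-style history keeping (is_latest/start_time/end_time
- // bookkeeping) vs plain delete+insert; updateType selects incremental (no hard deletes)
- // vs full (rows missing from the temp table are treated as deleted).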
- if (slowChanging) {
- if (INCREMENTAL.equals(updateType)) {
- execSCIncremental(conn, currentTableName, primaryFields, tempTable,
- currentTableColumnsList, businessCreateTime, businessUpdateTime)
- } else {
- execSCFull(conn, currentTableName, primaryFields, tempTable,
- currentTableColumnsList, businessCreateTime, businessUpdateTime)
- }
- } else {
- if (INCREMENTAL.equals(updateType)) {
- execNoSCIncremental(conn, currentTableName, primaryFields, tempTable,
- currentTableColumnsList, businessUpdateTime)
- } else {
- execNoSCFull(conn, currentTableName, primaryFields, tempTable, currentTableColumnsList, businessUpdateTime)
- }
- }
-
- clearResource(tempTable, conn)
- conn.close()
- }
-
- override def transform(args: Map[String, String]): DataFrame = {
- loadData(args)
- sparkSession.emptyDataFrame
- }
-
-}
-
-// scalastyle:on
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcResultSetTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcResultSetTransformer.scala
deleted file mode 100644
index 668b5e6..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcResultSetTransformer.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.datasource.connection.JdbcConnection
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql.types._
-import org.apache.spark.sql.{DataFrame, Row}
-
-import java.sql.ResultSetMetaData
-import scala.collection.mutable.ListBuffer
-
-object JdbcResultSetTransformer extends Transformer {
-
- // scalastyle:off
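- // Maps a JDBC column class name to a Spark SQL type. Only the six types below are
- // handled; anything else throws a MatchError (assumed acceptable for the sources used here).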
- def getDataType(sourceType: String, scale: Int, precision: Int): DataType = {
- sourceType match {
- case _ if classOf[String].getName == sourceType => StringType
- case _ if classOf[Integer].getName == sourceType => IntegerType
- case _ if classOf[java.lang.Short].getName == sourceType => ShortType
- case _ if classOf[java.lang.Long].getName == sourceType => LongType
- case _ if classOf[java.math.BigDecimal].getName == sourceType => DecimalType(precision, scale)
- case _ if classOf[java.sql.Timestamp].getName == sourceType => TimestampType
- }
- }
- // scalastyle:on
-
- def getSparkSchema(meta: ResultSetMetaData): ListBuffer[StructField] = {
- val count = meta.getColumnCount
- val res = ListBuffer[StructField]()
- for (i <- 1 to count) {
- res += StructField(
- meta.getColumnLabel(i),
- getDataType(meta.getColumnClassName(i), meta.getScale(i), meta.getPrecision(i)))
- }
- res
- }
-
- def getResultSet(args: collection.Map[String, String]): DataFrame = {
- val query: String = args("sql")
-
- val conn = JdbcConnection(args("dbName"), args("dbType")).getConnection()
- val stat = conn.createStatement()
- stat.execute(query)
- val rs = stat.getResultSet
- if (rs != null) {
- val objs: ListBuffer[ListBuffer[Object]] = ListBuffer[ListBuffer[Object]]()
- val schema = StructType(getSparkSchema(rs.getMetaData).toSeq)
- while (rs.next()) {
- val row: ListBuffer[Object] = ListBuffer[Object]()
- val count = rs.getMetaData.getColumnCount
- for (i <- 1 to count) {
- row += rs.getObject(i)
- }
- objs += row
- }
- val rows: Seq[Row] = objs.map(v => Row.fromSeq(v.toSeq)).toSeq
- ETLSparkSession
- .sparkSession
- .createDataFrame(ETLSparkSession.sparkSession.sparkContext.parallelize(rows), schema)
- } else {
- ETLSparkSession.sparkSession.emptyDataFrame
- }
- }
-
- override def transform(args: Map[String, String]): DataFrame = {
- getResultSet(args)
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JobDependencyCheckTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JobDependencyCheckTransformer.scala
deleted file mode 100644
index 7d83845..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/JobDependencyCheckTransformer.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.exception.Exception.JobDependenciesError
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor.jobLogAccessor
-import com.github.sharpdata.sharpetl.core.util.DateUtil.{L_YYYY_MM_DD_HH_MM_SS, LocalDateTimeToBigInt, YYYYMMDDHHMMSS}
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.core.util.StringUtil.BigIntConverter
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import org.apache.spark.sql.DataFrame
-
-import java.time.LocalDateTime
-
-import scala.math.BigInt.javaBigInteger2bigInt
-
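- // Fails the current job unless every workflow listed in `dependencies` has a successful
- // run whose dataRangeEnd covers this job's dataRangeEnd.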
-object JobDependencyCheckTransformer extends Transformer {
- override def transform(args: Map[String, String]): DataFrame = {
- val nextDataRangeEnd = LocalDateTime.parse(args("dataRangeEnd"), L_YYYY_MM_DD_HH_MM_SS).asBigInt()
- val dependencies: String = args("dependencies")
- if (!isNullOrEmpty(dependencies)) {
- val jobNames = dependencies.split(",").map(_.trim)
-
- val jobName = args("workflowName")
- val dependLogs = jobLogAccessor
- .getLatestSuccessJobLogByNames(jobNames)
- .filter(log => log.dataRangeEnd.asBigInt >= nextDataRangeEnd)
- if (dependLogs.length != jobNames.length) {
- val diff = jobNames.diff(dependLogs.map(log => log.getWorkflowName))
- val errorMessage =
- s"""
- |Dependencies of job $jobName are not completed! Current job will not run.
- |Incomplete dependencies: (${diff.mkString(",")})
- |""".stripMargin
- ETLLogger.error(errorMessage)
- throw JobDependenciesError(errorMessage)
- }
- }
- sparkSession.emptyDataFrame
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/KafkaOperationTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/KafkaOperationTransformer.scala
deleted file mode 100644
index 23c74c1..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/KafkaOperationTransformer.scala
+++ /dev/null
@@ -1,132 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.utils.HttpStatusUtils
-import com.github.sharpdata.sharpetl.core.exception.Exception.CheckFailedException
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import CheckAllConnectorStatusTransformer.getConnectorList
-import CheckConnectorStatusTransformer.{buildEndpoint, getStatusResponse}
-import org.apache.http.client.methods.{HttpDelete, HttpPost, HttpPut}
-import org.apache.http.impl.client.{CloseableHttpClient, HttpClients}
-import org.apache.spark.sql.DataFrame
-
-
-// $COVERAGE-OFF$
-object KafkaOperationTransformer extends Transformer {
-
-
- override def transform(args: Map[String, String]): DataFrame = {
- val truststoreLocation = ETLConfig.getProperty("truststore.location")
- System.setProperty("javax.net.ssl.trustStore", truststoreLocation)
-
- val uri = ETLConfig.getKafkaProperties
- val connectorNameArg = args.getOrElse("connectorName", "")
- val connectorNames =
- if (connectorNameArg.isEmpty) getConnectorList(uri)
- else connectorNameArg.split(",")
- val httpclient: CloseableHttpClient = HttpClients.createDefault
- val operationCommand = args.getOrElse("operationCommand", "restart")
- var operationFailedConnectors: Array[String] = Array.empty
- connectorNames.foreach(connectorName =>
- try {
- runCommand(httpclient, uri, operationCommand, connectorName)
- }
- catch {
- case _: RuntimeException => operationFailedConnectors = operationFailedConnectors :+ connectorName
- }
- )
- if (operationFailedConnectors.length > 0) {
- ETLLogger.error(s"Some Connectors $operationCommand Failed")
- throw new CheckFailedException(s"$operationCommand Failed Connector Names: ${operationFailedConnectors.mkString("{", ", ", "}")}")
- }
- sparkSession.emptyDataFrame
- }
-
- private def runCommand(httpclient: CloseableHttpClient, uri: String, operationCommand: String,
- connectorName: String): Unit = {
- operationCommand match {
- case "restart" => restartTask(httpclient, uri, connectorName)
- case "stop" => stopConnectors(httpclient, uri, connectorName)
- case "delete" => deleteConnectors(httpclient, uri, connectorName)
- case "resume" => resumeConnectors(httpclient, uri, connectorName)
- case _ => throw new CheckFailedException("Kafka Operation Command Not Found, Supported Command: restart, stop, delete")
- }
- }
-
- private def restartConnector(httpclient: CloseableHttpClient, uri: String, connectorName: String): Unit = {
- val endpoint = buildEndpoint(uri, "/connectors/" + connectorName + "/restart?includeTasks=true&onlyFailed=true")
- val httpPost = new HttpPost(endpoint)
- val response = httpclient.execute(httpPost)
- val statusCode = response.getStatusLine
- if (HttpStatusUtils.isSuccessful(statusCode)) {
- ETLLogger.info(s"Restart connector $connectorName succeed.")
- ETLLogger.info(s"The Response is ${response.toString}")
- } else {
- ETLLogger.error(s"Restart connector $connectorName failed.")
- ETLLogger.error(s"Error: ${response.getStatusLine.getReasonPhrase}")
- throw new CheckFailedException(s"Restart Connector $connectorName Failed, caused by " + response.getStatusLine.getReasonPhrase)
- }
- }
-
- private def restartTask(httpclient: CloseableHttpClient, uri: String, connectorName: String): Unit = {
- val kafkaResponse = getStatusResponse(httpclient, uri, connectorName)
- val taskId = kafkaResponse.getTaskId
- taskId.foreach(id => {
- val endpoint = buildEndpoint(uri, "/connectors/" + connectorName + "/tasks/" + id + "/restart")
- val response = httpclient.execute(new HttpPost(endpoint))
- if (HttpStatusUtils.isSuccessful(response.getStatusLine)) {
- ETLLogger.info(s"Restart connector $connectorName succeed, task id is $id")
- } else {
- ETLLogger.error(s"Restart connector $connectorName failed, task id is $id")
- throw new CheckFailedException(s"Restart Connector $connectorName Failed, caused by " + response.getStatusLine.getReasonPhrase)
- }
- })
- }
-
- private def stopConnectors(httpclient: CloseableHttpClient, uri: String, connectorName: String): Unit = {
- val endpoint = buildEndpoint(uri, "/connectors/" + connectorName + "/pause")
- val httpPut = new HttpPut(endpoint)
- val stopResponse = httpclient.execute(httpPut)
- val statusCode = stopResponse.getStatusLine
- if (HttpStatusUtils.isSuccessful(statusCode)) {
- ETLLogger.info(s"Stop connector $connectorName succeed.")
- ETLLogger.info(s"The Response is ${stopResponse.toString}")
- } else {
- ETLLogger.error(s"Stop connector $connectorName failed.")
- ETLLogger.error(s"Stop Error: ${stopResponse.getStatusLine.getReasonPhrase}")
- throw new CheckFailedException(s"Stop Connector $connectorName Failed, caused by " + stopResponse.getStatusLine.getReasonPhrase)
- }
- }
-
- private def deleteConnectors(httpclient: CloseableHttpClient, uri: String, connectorName: String): Unit = {
- val endpoint = buildEndpoint(uri, "/connectors/" + connectorName)
- val httpDelete = new HttpDelete(endpoint)
- val deleteResponse = httpclient.execute(httpDelete)
- val statusCode = deleteResponse.getStatusLine
- if (HttpStatusUtils.isSuccessful(statusCode)) {
- ETLLogger.info(s"Delete connector $connectorName succeed.")
- ETLLogger.info(s"The Delete Response is ${deleteResponse.toString}")
- } else {
- ETLLogger.error(s"Delete connector $connectorName failed.")
- ETLLogger.error(s"Delete Error: ${deleteResponse.getStatusLine.getReasonPhrase}")
- throw new CheckFailedException(s"Delete Connector $connectorName Failed, caused by " + deleteResponse.getStatusLine.getReasonPhrase)
- }
- }
-
- private def resumeConnectors(httpclient: CloseableHttpClient, uri: String, connectorName: String): Unit = {
- val endpoint = buildEndpoint(uri, "/connectors/" + connectorName + "/resume")
- val httpPut = new HttpPut(endpoint)
- val resumeResponse = httpclient.execute(httpPut)
- val statusCode = resumeResponse.getStatusLine
- if (HttpStatusUtils.isSuccessful(statusCode)) {
- ETLLogger.info(s"Resume connector $connectorName succeed.")
- ETLLogger.info(s"The Response is ${resumeResponse.toString}")
- } else {
- ETLLogger.error(s"Resume connector $connectorName failed.")
- ETLLogger.error(s"Resume Error: ${resumeResponse.getStatusLine.getReasonPhrase}")
- throw new CheckFailedException(s"Resume Connector $connectorName Failed, caused by " + resumeResponse.getStatusLine.getReasonPhrase)
- }
- }
-
-}
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/NoPartitionAlertTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/NoPartitionAlertTransformer.scala
deleted file mode 100644
index 2a6757c..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/NoPartitionAlertTransformer.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.core.exception.Exception.PartitionNotFoundException
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.spark.utils.{ETLSparkSession, SparkCatalogUtil}
-import org.apache.spark.sql
-import org.joda.time.DateTime
-import org.joda.time.format.DateTimeFormat
-
-// $COVERAGE-OFF$
-object NoPartitionAlertTransformer extends Transformer {
- override def transform(args: Map[String, String]): sql.DataFrame = {
-
- val databaseName = args("databaseName")
- val tables = args("tables").split(",").map(_.trim).toList
- val interval = args("interval").toInt
- val unit = args("unit")
- val endDate = DateTime.parse(args("endDate"), DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"))
-
- var parNotFoundTables: List[String] = List.empty
-
- unit.toUpperCase match {
- case "DAY" =>
- val partitionsToScan = dailyPartitionsToScan(endDate, interval)
- parNotFoundTables = tables.filter(table => {
- !partitionExist(databaseName, table, partitionsToScan)
- })
- case _ => throw new IllegalArgumentException(s"Unsupported date interval type UNIT:[${unit}]")
- }
-
- if (parNotFoundTables.nonEmpty) {
- val errMessage = s"Can not found partitions within interval [${interval} ${unit}] for " +
- s"tables: [${parNotFoundTables.mkString(",")}], please check the correspond job status"
- throw new PartitionNotFoundException(errMessage)
- }
-
- ETLSparkSession.sparkSession.emptyDataFrame
- }
-
- def partitionExist(databaseName: String, tableName: String, partitions: List[String]): Boolean = {
- partitions.exists(partition => {
- val exists =
- try {
- SparkCatalogUtil.isPartitionDataExists(tableName, partition)
- } catch {
- case e: Exception =>
- ETLLogger.warn(s"Exception occur when get partition[${partition.mkString(",")}] " +
- s"from table[${databaseName}.${tableName}] with exception:[${e.getMessage}]")
- false
- }
- exists
- })
-
- }
-
-
- def dailyPartitionsToScan(end: DateTime, interval: Int): List[String] = {
- (0 to interval)
- .map(end.minusDays)
- .map(buildDailyPartitionPartVal)
- .toList
- }
-
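- // e.g. 2021-11-01 -> year="2021" AND month="11" AND day="01"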
- def buildDailyPartitionPartVal(date: DateTime): String = {
- List(s"""year="${date.getYear}"""",
- s"""month="${DateTimeFormat.forPattern("MM").print(date)}"""",
- s"""day="${DateTimeFormat.forPattern("dd").print(date)}""""
- ).mkString(" AND ")
- }
-}
-// $COVERAGE-ON$
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/NonSCDTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/NonSCDTransformer.scala
deleted file mode 100644
index 40bdcb9..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/NonSCDTransformer.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.core.util.ETLConfig.jobIdColumn
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import SCDTransformer.{getAppendSelectClause, getPartitionClause}
-import com.github.sharpdata.sharpetl.spark.datasource.HiveDataSource
-import com.github.sharpdata.sharpetl.spark.utils.SparkCatalogUtil
-import org.apache.spark.sql._
-
-object NonSCDTransformer extends Transformer {
-
- // scalastyle:off
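- // Builds a dedup-merge SQL: union the incoming ODS view with the current DW view,
- // then keep the newest row per primary key (row_number ordered by updateTimeField desc).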
- def genSql(args: Map[String, String]): String = {
- val odsViewName = args("odsViewName")
- val dwViewName = args("dwViewName")
-
- val partitionField = args.getOrElse("partitionField", "")
- val partitionByClause = args("primaryFields")
- .split(",")
- .mkString(", ")
- val createTimeField = args("createTimeField")
- val updateTimeField = args("updateTimeField")
- val surrogateField = args("surrogateField")
- val dropUpdateTimeField = args.getOrElse("dropUpdateTimeField", false.toString).toBoolean
- val orderByClause = updateTimeField
-
- val partitionFormat = args.getOrElse("partitionFormat", "")
-
- val timeFormat = s"'${args.getOrElse("timeFormat", "yyyy-MM-dd HH:mm:ss")}'"
-
- val updateCols = updateTimeField.split(",").toSet
- val createCols = createTimeField.split(",").toSet
- val excludeCols =
- Set(jobIdColumn, "start_time", "end_time", "is_latest", "is_active",
- "rank_num", "count_num", "dense_rank_num", "data_status", "match_value",
- "max_dense_rank_num", "older_data_end_time") ++ partitionFormat.split("/").toSet
-
- val cols = SparkCatalogUtil.getAllColNamesOfTempTable(odsViewName)
- val selectClause = cols.filterNot(excludeCols.contains).mkString(",")
-
- val droppedSortColumns = if (dropUpdateTimeField) {
- cols.filterNot(excludeCols.contains).filterNot(updateCols.contains).filterNot(createCols.contains).mkString(",")
- } else {
- selectClause
- }
-
- s"""
- |select $droppedSortColumns,
- | ${getPartitionClause(partitionField, partitionFormat, timeFormat)}
- | ${getAppendSelectClause(args)}
- | from (select $selectClause,
- | row_number() over (partition by $partitionByClause order by $orderByClause desc) as rank_num
- | from (
- | select $selectClause from $odsViewName
- | union all
- | select $selectClause from $dwViewName
- | )
- | )
- | where rank_num = 1
- |""".stripMargin
- }
- // scalastyle:on
-
- override def transform(args: Map[String, String]): DataFrame = {
- val sql = genSql(args)
- ETLLogger.info(s"[Sql]:$sql")
- new HiveDataSource().load(sparkSession, sql)
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/SCDTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/SCDTransformer.scala
deleted file mode 100644
index 8342925..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/SCDTransformer.scala
+++ /dev/null
@@ -1,220 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.datasource.HiveDataSource
-import com.github.sharpdata.sharpetl.core.util.Constants.LoadType.{FULL, INCREMENTAL}
-import com.github.sharpdata.sharpetl.core.util.ETLConfig.jobIdColumn
-import com.github.sharpdata.sharpetl.core.util.{DateUtil, ETLLogger}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-import com.github.sharpdata.sharpetl.spark.utils.SparkCatalogUtil
-import org.apache.spark.sql._
-
-import scala.util.Try
-
-object SCDTransformer extends Transformer {
- def getAppendSelectClause(args: Map[String, String]): String = {
- s""" '${args("jobId")}' as $jobIdColumn""".stripMargin.trim
- }
-
- def getCaseClause(args: Map[String, String],
- hardDeleteEndTime: String): String = {
- args.get("dwUpdateType") match {
- case Some(INCREMENTAL) =>
- s""" -- incremental --
- | -- unchanged active data
- | when is_active = 1 and is_latest = 1 and count_num = 1 and data_status = 'older_or_unchanged'
- | then array('1', '1', end_time)""".stripMargin
- case Some(FULL) =>
- s""" -- full --
- | -- deleted: new deleted data(full hard delete)
- | when is_active = 1 and is_latest = 1 and count_num = 1 and data_status = 'older_or_unchanged'
- | then array('0', '1', '$hardDeleteEndTime')
- | -- unchanged deleted data(deleted before)
- | when is_active = 0 and is_latest = 1 and count_num = 1 and data_status = 'older_or_unchanged'
- | then array('0', '1', end_time)""".stripMargin
- case _ => ??? // any other dwUpdateType fails fast with scala.NotImplementedError
- }
- }
-
- // scalastyle:off
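- // SCD2 merge pipeline, expressed as a chain of CTEs:
- //   ods (dedup) -> dw / dw_history -> ods_and_latest_dw (flag latest/newer rows)
- //   -> count_and_rank -> max_rank_and_end_time -> match_result (final is_active/is_latest/end_time)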
- def scdSql(args: Map[String, String]): String = {
- val odsViewName = args("odsViewName")
- val dwViewName = args("dwViewName")
- val partitionByClause = args("primaryFields")
- .split(",")
- .mkString(", ")
-
- val partitionField = args.getOrElse("partitionField", "")
- val createTimeField = args("createTimeField")
- val updateTimeField = args("updateTimeField")
- val surrogateField = args("surrogateField")
- val dropUpdateTimeField = args.getOrElse("dropUpdateTimeField", false.toString).toBoolean
- val orderByClause = updateTimeField
-
- val partitionFormat = args.getOrElse("partitionFormat", "")
-
- val timeFormat = s"'${args.getOrElse("timeFormat", "yyyy-MM-dd HH:mm:ss")}'"
-
- val partitionClause = getPartitionClause(partitionField, partitionFormat, timeFormat)
-
- val updateCols = updateTimeField.split(",").toSet
- val createCols = createTimeField.split(",").toSet
- val excludeCols =
- Set(jobIdColumn, "start_time", "end_time", "is_latest", "is_active",
- "rank_num", "count_num", "dense_rank_num", "data_status", "match_value",
- "max_dense_rank_num", "older_data_end_time") ++ partitionFormat.split("/").toSet
-
- val cols = SparkCatalogUtil.getAllColNamesOfTempTable(odsViewName)
- val selectClause = cols.filterNot(excludeCols.contains).mkString(",")
-
- val droppedSortColumns = if (dropUpdateTimeField) {
- cols.filterNot(excludeCols.contains).filterNot(updateCols.contains).filterNot(createCols.contains).mkString(",")
- } else {
- selectClause
- }
-
- val distinctByClause = s"(${cols.filterNot(excludeCols.contains).filterNot(it => it == surrogateField).mkString(",")})"
-
- val time = Try(DateUtil.YYYY_MM_DD_HH_MM_SS.parse(args("dataRangeStart")).getTime)
- .getOrElse(args("dataRangeStart").toLong)
-
- val hardDeleteEndTime = DateUtil
- .YYYY_MM_DD_HH_MM_SS
- .format(time - 1 * 1000)
-
- val appendSelectClause = getAppendSelectClause(args)
- val caseClause = getCaseClause(args, hardDeleteEndTime)
-
- val joinClause = args("primaryFields").split(",").map(col => s"updated.$col = dw.$col").mkString(" and ")
- val doesNotExistsInDwClause = args("primaryFields").split(",").map(col => s"dw.$col is null").mkString(" or ")
-
- s"""
- |with ods as (select $selectClause
- | from (select $selectClause,
- | row_number() over (partition by $distinctByClause order by $orderByClause desc) as rank_num
- | from $odsViewName)
- | where rank_num = 1), -- distinct ods data by all cols
- | dw as (select $droppedSortColumns,
- | start_time,
- | end_time,
- | is_active,
- | is_latest
- | from $dwViewName), -- affected dw data
- | dw_history as (select *
- | from dw
- | where is_latest = 0), -- passively update dw history data, no changes
- | ods_and_latest_dw as (select ${droppedSortColumns.split(",").map(col => s"updated.$col").mkString(",")},
- | case
- | -- first time create, using min(createTime, updateTime) as start_time
- | when updated.pre_update_time is null and ($doesNotExistsInDwClause) then updated.$createTimeField
- | else updated.$updateTimeField
- | end as start_time,
- | updated.end_time,
- | case
- | when (updated.rank_num = 1) then '1'
- | else '0'
- | end as `is_active`,
- | case
- | when (updated.rank_num = 1) then '1'
- | else '0'
- | end as `is_latest`,
- | case
- | when (updated.rank_num = 1) then 'latest'
- | else 'newer'
- | end as `data_status`
- | from (select $selectClause,
- | row_number() over rank_window as rank_num,
- | lag($updateTimeField) over rank_window as end_time,
- | lead($updateTimeField) over rank_window as pre_update_time
- | from ods
- | window
- | rank_window as (partition by $partitionByClause order by $orderByClause desc)
- | ) updated
- | left join dw on $joinClause and dw.is_active = '1' and dw.is_latest = '1'
- | union all
- | select *,
- | 'older_or_unchanged' as data_status
- | from dw
- | where is_latest = 1), -- latest & newer ods data union all older & unchanged dw data
- | count_and_rank as (select *,
- | count(1) over dense_rank_window as count_num,
- | dense_rank() over dense_rank_window as dense_rank_num
- | from ods_and_latest_dw
- | window
- | dense_rank_window as (partition by $partitionByClause order by start_time)),
- | max_rank_and_end_time as (select *,
- | first_value(dense_rank_num) over part_window as max_dense_rank_num,
- | lag(start_time) over part_window as older_data_end_time
- | from count_and_rank
- | window
- | part_window as (partition by $partitionByClause order by start_time desc)),
- | match_result as (select $droppedSortColumns,
- | start_time,
- | case
- | -- new or updated data
- | when data_status = 'latest' or data_status = 'newer'
- | then array(is_active, is_latest, end_time)
- | -- updated: older version of updated data
- | when data_status = 'older_or_unchanged' and is_active = 1 and is_latest = 1 and max_dense_rank_num > 1
- | then array('0', '0', older_data_end_time)
- | -- re-add: re-add deleted data
- | when data_status = 'older_or_unchanged' and is_active = 0 and is_latest = 1 and count_num = 2
- | then array('0', '0', end_time)
- | -- full or re-run(incremental or full)
- | -- unchanged: duplicated active unchanged data
- | when data_status = 'older_or_unchanged' and is_active = 1 and is_latest = 1 and count_num = 2 and
- | max_dense_rank_num = 1
- | then array('1', '1', end_time)
- |$caseClause
- | end as match_value
- | from max_rank_and_end_time
- | where not (
- | -- filter --
- | -- full or rerun(incremental or full)
- | -- unchanged: duplicated active unchanged data
- | is_active = 1
- | and is_latest = 1
- | and count_num = 2
- | and data_status = 'latest'
- | and max_dense_rank_num = 1
- | )
- | )
- |select $droppedSortColumns,
- | cast(start_time as timestamp) as start_time,
- | cast(match_value[2] as timestamp) as end_time,
- | match_value[0] as is_active,
- | match_value[1] as is_latest,
- | $partitionClause
- | $appendSelectClause
- |from match_result
- |union all
- |-- incremental or full --
- |-- history: not-latest rows carried over unchanged
- |select $droppedSortColumns,
- | cast(start_time as timestamp) as start_time,
- | cast(end_time as timestamp) as end_time,
- | is_active,
- | is_latest,
- | $partitionClause
- | $appendSelectClause
- |from dw_history
- |""".stripMargin
- }
- // scalastyle:on
-
- def getPartitionClause(partitionField: String, partitionFormat: String, timeFormat: String): String = {
- partitionFormat match {
- case "" => ""
- case "year/month/day" =>
- s"""from_unixtime(unix_timestamp($partitionField, $timeFormat), 'yyyy') as year,
- | from_unixtime(unix_timestamp($partitionField, $timeFormat), 'MM') as month,
- | from_unixtime(unix_timestamp($partitionField, $timeFormat), 'dd') as day,""".stripMargin
- case _ => ???
- }
- }
-
- override def transform(args: Map[String, String]): DataFrame = {
- val sql = scdSql(args)
- ETLLogger.info(s"[SCD Sql]:$sql")
- new HiveDataSource().load(sparkSession, sql)
- }
-}
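
The generated SCD SQL leans on window functions over the business key: `row_number` marks the latest version, while `lag`/`lead` over the same descending window supply each row's `end_time` and previous update time. A minimal sketch of that windowing idea, with hypothetical data and column names:

```scala
import org.apache.spark.sql.SparkSession

object ScdWindowSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("scd-window").getOrCreate()
    import spark.implicits._

    Seq(
      ("1", "alice",  "2022-01-01 00:00:00"),
      ("1", "alicia", "2022-02-01 00:00:00"),
      ("2", "bob",    "2022-01-15 00:00:00")
    ).toDF("id", "name", "updated_at").createOrReplaceTempView("ods")

    // The newest row per key stays open (is_latest = 1); lag() over the
    // descending window hands each older row the next version's time as end_time.
    spark.sql(
      """
        |select id, name,
        |       updated_at as start_time,
        |       lag(updated_at) over w as end_time,
        |       case when row_number() over w = 1 then 1 else 0 end as is_latest
        |from ods
        |window w as (partition by id order by updated_at desc)
        |""".stripMargin).show(false)

    spark.stop()
  }
}
```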
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/Transformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/Transformer.scala
deleted file mode 100644
index 56cee73..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/Transformer.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.core.annotation.Annotations.Stable
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import org.apache.spark.sql.DataFrame
-
-
-@Stable(since = "1.0.0")
-trait Transformer {
-
- /**
- * read: produce a DataFrame from the step's args
- */
- def transform(args: Map[String, String]): DataFrame = ???
-
- /**
- * write: persist the given DataFrame for the step
- */
- def transform(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = ???
-}
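
Concrete transformers override one side of this contract: the read overload returns a `DataFrame`, the write overload consumes one. A hypothetical read-side implementation (the object and its args are illustrative, not part of the repository):

```scala
import com.github.sharpdata.sharpetl.spark.transformation.Transformer
import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
import org.apache.spark.sql.DataFrame

object EchoTransformer extends Transformer {
  // read side: build a single-row DataFrame from the step's args
  override def transform(args: Map[String, String]): DataFrame = {
    import sparkSession.implicits._
    Seq(args.getOrElse("message", "hello")).toDF("message")
  }
}
```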
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/ZipFilesTransformer.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/ZipFilesTransformer.scala
deleted file mode 100644
index 4078cb6..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/ZipFilesTransformer.scala
+++ /dev/null
@@ -1,195 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-import com.github.sharpdata.sharpetl.core.util.HDFSUtil.extractFileName
-import com.github.sharpdata.sharpetl.core.util.{ETLLogger, HDFSUtil}
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql
-
-import java.io.FileOutputStream
-import java.time.{LocalDateTime, ZoneId}
-import java.util.zip.{ZipEntry, ZipOutputStream}
-import scala.collection.mutable
-
-// $COVERAGE-OFF$
-/**
- * usage:
- * 1. compress files in a single file-name (date) range:
- * -- step=1
- * -- source
- * -- dataSourceType=transformation
- * -- className=com.github.sharpdata.sharpetl.spark.transformation.ZipFilesTransformer
- * -- transformerType=object
- * -- methodName=transform
- * -- fileBasePath=hdfs:///data/test
- * -- fileStart=test-20220503000000.txt
- * -- fileEnd=test-20220603000000.txt
- * -- zipFilePath=hdfs:///data/test
- * -- zipFileName=test-202206.zip
- * -- target
- * -- dataSourceType=do_nothing
- * 2. compress multiple file-name ranges into multiple zip files:
- * -- step=1
- * -- source
- * -- dataSourceType=transformation
- * -- className=com.github.sharpdata.sharpetl.spark.transformation.ZipFilesTransformer
- * -- transformerType=object
- * -- methodName=transform
- * -- fileBasePath=hdfs:///data/test
- * -- fileStart=test-20220503000000.txt,ships_20220528.txt
- * -- fileEnd=test-20220603000000.txt,ships_20220530.txt
- * -- zipFilePath=hdfs:///data/test
- * -- zipFileName=test-202206.zip,ships.zip
- * -- target
- * -- dataSourceType=do_nothing
- * 3. compress multiple file-name ranges into a single zip file:
- * -- step=1
- * -- source
- * -- dataSourceType=transformation
- * -- className=com.github.sharpdata.sharpetl.spark.transformation.ZipFilesTransformer
- * -- transformerType=object
- * -- methodName=transform
- * -- fileBasePath=hdfs:///data/test
- * -- fileStart=test-20220503000000.txt,ships_20220528.txt
- * -- fileEnd=test-20220603000000.txt,ships_20220530.txt
- * -- zipFilePath=hdfs:///data/test
- * -- zipFileName=results.zip
- * -- target
- * -- dataSourceType=do_nothing
- * 4. compress files matched by a file-name regex and a modified-time range:
- * -- step=1
- * -- source
- * -- dataSourceType=transformation
- * -- className=com.github.sharpdata.sharpetl.spark.transformation.ZipFilesTransformer
- * -- transformerType=object
- * -- methodName=transform
- * -- fileBasePath=hdfs:///data/test
- * -- zipFilePath=hdfs:///data/test
- * -- zipFileName=results.zip
- * -- filterByLastModified=true
- * -- fileNamePattern=New Area - file_name_\d\d-\w\w\w-\d\d\d\d.zip
- * -- fileStart=2022-06-10 00:00:00
- * -- fileEnd=2022-06-11 00:00:00
- * -- target
- * -- dataSourceType=do_nothing
- */
-object ZipFilesTransformer extends Transformer {
- val BUFFER_SIZE = 1024
- var gfos: FileOutputStream = _
- var gzipOut: ZipOutputStream = _
- val fileBuffer: mutable.Buffer[String] = mutable.Buffer.empty
-
- override def transform(args: Map[String, String]): sql.DataFrame = {
- gfos = null // scalastyle:off
- gzipOut = null // scalastyle:off
- fileBuffer.clear()
- val fileBasePath = args("fileBasePath")
- val fileStart = args("fileStart") // include
- val fileEnd = args("fileEnd") // exclude
- val zipFilePath = args("zipFilePath")
- val zipFileName = args("zipFileName")
- val filterByLastModified = args.getOrElse("filterByLastModified", "false").toBoolean
- val fileNamePattern = args.getOrElse("fileNamePattern", "") // only provided when filterByLastModified is true
-
- val zipFiles = zipFileName.split(",")
- val sameZip = zipFiles.size == 1
-
- if (filterByLastModified) {
- // hard-code the timezone to Z (UTC), because [[file.getModificationTime]] returns milliseconds since January 1, 1970 UTC.
- val startTime = LocalDateTime.parse(fileStart, L_YYYY_MM_DD_HH_MM_SS).atZone(ZoneId.of("Z")).toEpochSecond * 1000
- val endTime = LocalDateTime.parse(fileEnd, L_YYYY_MM_DD_HH_MM_SS).atZone(ZoneId.of("Z")).toEpochSecond * 1000
- val fileLists = HDFSUtil.listFileStatus(fileBasePath)
- .filter { file =>
- file.getPath.getName.matches(fileNamePattern) &&
- file.getModificationTime >= startTime &&
- file.getModificationTime < endTime
- }
- .map(_.getPath.getName)
- .toSeq
- processCompress(fileBasePath, zipFileName, zipFiles, sameZip, fileStart, fileEnd, 0, fileLists)
- } else {
- fileStart.split(",")
- .zip(fileEnd.split(","))
- .zipWithIndex
- .foreach {
- case ((fileStart: String, fileEnd: String), idx) =>
- val fileLists = HDFSUtil.recursiveListFiles(fileBasePath)
- .map(extractFileName)
- .filter(file => file.length == fileStart.length && file >= fileStart && file < fileEnd)
- .toSeq
- processCompress(fileBasePath, zipFileName, zipFiles, sameZip, fileStart, fileEnd, idx, fileLists)
- }
- }
-
- if (sameZip) {
- gzipOut.close()
- gfos.close()
- uploadFileAndDeleteLocalFile(zipFilePath, zipFileName)
- } else {
- zipFiles.foreach(zip => {
- uploadFileAndDeleteLocalFile(zipFilePath, zip)
- })
- }
-
- fileBuffer.foreach { file =>
- ETLLogger.warn(s"Deleting file from HDFS $fileBasePath/$file")
- HDFSUtil.delete(s"$fileBasePath/$file", false)
- }
-
- ETLSparkSession.sparkSession.emptyDataFrame
- }
-
- private def processCompress(fileBasePath: String, zipFileName: String, zipFiles: Array[String], sameZip: Boolean,
- fileStart: String, fileEnd: String, idx: Int, fileLists: Seq[String]) = {
- ETLLogger.warn(s"Files will be zipped and deleted by [$fileStart, $fileEnd): \n ${fileLists.mkString(",\n")}")
-
- val (fos: FileOutputStream, zipOut: ZipOutputStream) = createZipStream(if (sameZip) zipFileName else zipFiles(idx), sameZip)
- fileLists.foreach(file => {
- fileBuffer.append(file)
- val fis = HDFSUtil.readFile(s"$fileBasePath/$file")
- val zipEntry = new ZipEntry(file)
- zipOut.putNextEntry(zipEntry)
-
- val bytes = new Array[Byte](BUFFER_SIZE)
- var length = 0
-
- while (length >= 0) {
- length = fis.read(bytes)
- if (length > 0) {
- zipOut.write(bytes, 0, length)
- }
- }
- fis.close()
- })
-
- if (!sameZip) {
- zipOut.close()
- fos.close()
- }
- }
-
- private def uploadFileAndDeleteLocalFile(zipFilePath: String, zip: String) = {
- ETLLogger.info(s"Uploading zip file $zip to HDFS $zipFilePath/$zip...")
- HDFSUtil.moveFromLocal(zip, s"$zipFilePath/$zip")
- }
-
- private def createZipStream(zipFileName: String, sameZip: Boolean) = {
- if (!sameZip) {
- val fos = new FileOutputStream(zipFileName)
- val zipOut = new ZipOutputStream(fos)
- (fos, zipOut)
- } else {
- if (gfos == null && gzipOut == null) {
- val fos = new FileOutputStream(zipFileName)
- val zipOut = new ZipOutputStream(fos)
- gfos = fos
- gzipOut = zipOut
- (gfos, gzipOut)
- } else {
- (gfos, gzipOut)
- }
- }
-
- }
-}
-// $COVERAGE-ON$
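
The core of the transformer is a plain buffered copy from each input stream into a `ZipEntry`. The same loop against the local filesystem, as a self-contained sketch (file names are placeholders; the real code reads from HDFS):

```scala
import java.io.{BufferedInputStream, FileInputStream, FileOutputStream}
import java.util.zip.{ZipEntry, ZipOutputStream}

object ZipCopySketch {
  def main(args: Array[String]): Unit = {
    val zipOut = new ZipOutputStream(new FileOutputStream("results.zip"))
    val buffer = new Array[Byte](1024)
    Seq("a.txt", "b.txt").foreach { file =>
      val in = new BufferedInputStream(new FileInputStream(file))
      zipOut.putNextEntry(new ZipEntry(file))
      var length = in.read(buffer)
      while (length != -1) { // copy until EOF
        zipOut.write(buffer, 0, length)
        length = in.read(buffer)
      }
      in.close()
      zipOut.closeEntry()
    }
    zipOut.close()
  }
}
```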
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ConvertUtils.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ConvertUtils.scala
deleted file mode 100644
index 815b89b..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ConvertUtils.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import org.apache.spark.sql.Column
-
-object ConvertUtils {
- def strsToColumns: Seq[String] => Seq[Column] = _.map(new Column(_))
-
- def strToColumn: String => Column = new Column(_)
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ETLSparkSession.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ETLSparkSession.scala
deleted file mode 100644
index 54bdf9a..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ETLSparkSession.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import com.github.sharpdata.sharpetl.core.quality.QualityCheckRule
-import com.github.sharpdata.sharpetl.core.repository.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.util.Constants.Environment
-import com.github.sharpdata.sharpetl.core.util.ETLConfig.purgeHiveTable
-import com.github.sharpdata.sharpetl.core.util.{ETLConfig, ETLLogger}
-import com.github.sharpdata.sharpetl.spark.extension.UdfInitializer
-import com.github.sharpdata.sharpetl.spark.job.SparkWorkflowInterpreter
-import com.github.sharpdata.sharpetl.spark.utils.EmbeddedHive.sparkWithEmbeddedHive
-import org.apache.spark.SparkConf
-import org.apache.spark.sql.SparkSession
-
-object ETLSparkSession {
- var local = false
- private var wfName = "default"
- private var sparkConf: SparkConf = _
- private var autoCloseSession: Boolean = true
-
- lazy val sparkSession: SparkSession = {
- if (Environment.CURRENT == Environment.EMBEDDED_HIVE) {
- sparkWithEmbeddedHive
- } else if (local) {
- SparkSession.builder().config(conf()).getOrCreate()
- } else {
- //System.setProperty("atlas.conf", "/usr/hdp/current/spark2-client/conf/")
- SparkSession.builder().config(conf()).enableHiveSupport().getOrCreate()
- }
- }
-
- def conf(): SparkConf = {
- if (sparkConf == null) {
- ETLLogger.info("init spark conf...")
- sparkConf = new SparkConf()
- if (!sparkConf.contains("spark.master")) {
- sparkConf.setMaster("local[*]")
- }
- sparkConf.set("spark.sql.legacy.timeParserPolicy", "LEGACY")
- setSparkConf(sparkConf)
- }
- sparkConf
- }
-
- private def setSparkConf(sparkConf: SparkConf): Unit = {
- ETLConfig
- .getSparkProperties(wfName)
- .foreach {
- case (key, value) =>
- sparkConf.set(key, value)
- ETLLogger.info(s"[Set spark config]: $key=$value")
- }
- }
-
- @inline def getHiveSparkSession(): SparkSession = sparkSession
-
- def getSparkInterpreter(local: Boolean,
- wfName: String,
- autoCloseSession: Boolean,
- etlDatabaseType: String,
- dataQualityCheckRules: Map[String, QualityCheckRule])
- : SparkWorkflowInterpreter = {
- ETLSparkSession.local = local
- ETLSparkSession.wfName = wfName
- ETLSparkSession.autoCloseSession = autoCloseSession
- val spark = ETLSparkSession.sparkSession
- UdfInitializer.init(spark)
- new SparkWorkflowInterpreter(spark, dataQualityCheckRules, QualityCheckAccessor.getInstance(etlDatabaseType))
- }
-
- def release(spark: SparkSession): Unit = {
- if (spark != null && autoCloseSession) {
- spark.stop()
- }
- }
-
- def autoPurgeHiveTable(table: String): Unit = {
- if (sparkSession.catalog.tableExists(table)) {
- purgeHiveTable match {
- case "true" => sparkSession.sql(s"ALTER TABLE $table SET TBLPROPERTIES('auto.purge' = 'true')")
- case "false" => sparkSession.sql(s"ALTER TABLE $table SET TBLPROPERTIES('auto.purge' = 'false')")
- case "none" => () //follow the original tblproperties of table.
- }
- }
- }
-}
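
`conf()` builds the session configuration lazily: it falls back to a local master only when none was supplied (e.g. by spark-submit), pins the legacy time parser, then layers workflow-scoped properties on top. A sketch of just that fallback behaviour:

```scala
import org.apache.spark.SparkConf

object ConfFallbackSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    // only force a local master when the launcher did not set one
    if (!conf.contains("spark.master")) conf.setMaster("local[*]")
    conf.set("spark.sql.legacy.timeParserPolicy", "LEGACY")
    println(conf.toDebugString)
  }
}
```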
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/EmbeddedHive.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/EmbeddedHive.scala
deleted file mode 100644
index 6df8763..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/EmbeddedHive.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import com.github.sharpdata.sharpetl.spark.extension.UdfInitializer
-import ETLSparkSession.conf
-import ShutdownHookManager.registerShutdownDeleteDir
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.spark.sql.SparkSession
-
-import java.io.{File, IOException}
-import java.util.UUID
-
-// scalastyle:off
-object EmbeddedHive {
- lazy val tempDir: File = createTempDir()
- lazy val localMetastorePath: String = new File(tempDir, "metastore").getCanonicalPath
- lazy val localWarehousePath: String = new File(tempDir, "warehouse").getCanonicalPath
-
- def newBuilder(): SparkSession.Builder = {
- val builder = SparkSession.builder()
- // Long story with lz4 issues in 2.3+
- builder.config("spark.io.compression.codec", "snappy")
- // We have to mask all properties in hive-site.xml that relates to metastore
- // data source as we used a local metastore here.
- import org.apache.hadoop.hive.conf.HiveConf
- val hiveConfVars = HiveConf.ConfVars.values()
- val accessibleHiveConfVars = hiveConfVars.map(WrappedConfVar)
- accessibleHiveConfVars.foreach { confvar =>
- if (confvar.varname.contains("datanucleus") ||
- confvar.varname.contains("jdo")) {
- builder.config(confvar.varname, confvar.getDefaultExpr())
- }
- }
- builder.config(HiveConf.ConfVars.METASTOREURIS.varname, "")
- builder.config("javax.jdo.option.ConnectionURL",
- s"jdbc:derby:;databaseName=$localMetastorePath;create=true")
- builder.config("datanucleus.rdbms.datastoreAdapterClassName",
- "org.datanucleus.store.rdbms.adapter.DerbyAdapter")
- builder.config("spark.sql.warehouse.dir",
- localWarehousePath)
- builder
- }
-
- lazy val sparkWithEmbeddedHive: SparkSession = {
- val session =
- newBuilder()
- .master("local")
- .config("spark.sql.shuffle.partitions", "1")
- .config("spark.sql.legacy.timeParserPolicy", "LEGACY")
- .config("spark.sql.catalogImplementation", "hive")
- .config(conf())
- .enableHiveSupport()
- .getOrCreate()
- UdfInitializer.init(session)
- session
- }
-
-
- def createTempDir(root: String = System.getProperty("java.io.tmpdir")): File = {
- val dir = createDirectory(root)
- registerShutdownDeleteDir(dir)
- dir
- }
-
- def createDirectory(root: String): File = {
- var attempts = 0
- val maxAttempts = 10
- var dir: File = null
- while (dir == null) {
- attempts += 1
- if (attempts > maxAttempts) {
- throw new IOException(
- s"Failed to create a temp directory (under ${root}) after ${maxAttempts}")
- }
- try {
- dir = new File(root, "spark-" + UUID.randomUUID.toString)
- if (dir.exists() || !dir.mkdirs()) {
- dir = null
- }
- } catch {
- case _: SecurityException => dir = null;
- }
- }
-
- dir
- }
-}
-
-
-case class WrappedConfVar(cv: ConfVars) {
- val varname = cv.varname
-
- def getDefaultExpr(): String = cv.getDefaultExpr()
-}
-// scalastyle:on
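
Because the builder points the metastore at a throwaway Derby database and the warehouse at a temp directory, Hive-backed DDL works in tests without any cluster. A hypothetical test-style usage:

```scala
import com.github.sharpdata.sharpetl.spark.utils.EmbeddedHive.sparkWithEmbeddedHive

object EmbeddedHiveSketch {
  def main(args: Array[String]): Unit = {
    val spark = sparkWithEmbeddedHive
    spark.sql("create table if not exists t (id int)") // backed by the local metastore
    spark.sql("show tables").show(false)
    spark.stop()
  }
}
```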
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/Encoders.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/Encoders.scala
deleted file mode 100644
index be09413..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/Encoders.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import com.github.sharpdata.sharpetl.core.quality.DataQualityCheckResult
-import org.apache.spark.sql.{Encoder, Encoders}
-
-object Encoder extends Serializable {
- val dqEncoder: Encoder[DataQualityCheckResult] = Encoders.product[DataQualityCheckResult]
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/HttpStatusUtils.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/HttpStatusUtils.scala
deleted file mode 100644
index 7acf811..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/HttpStatusUtils.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import org.apache.http.StatusLine
-
-object HttpStatusUtils {
-
- def isSuccessful(statusLine: StatusLine): Boolean = {
- statusLine.getStatusCode / 100 == 2
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JSchUtil.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JSchUtil.scala
deleted file mode 100644
index eae8020..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JSchUtil.scala
+++ /dev/null
@@ -1,131 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-// scalastyle:off
-
-import com.github.sharpdata.sharpetl.core.datasource.connection.ScpConnection
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-
-import java.io._
-// scalastyle:on
-import com.jcraft.jsch.{ChannelExec, JSch, Session}
-import org.apache.commons.io.IOUtils
-import org.apache.commons.lang.StringUtils
-
-import scala.collection.mutable.ArrayBuffer
-
-// $COVERAGE-OFF$
-object JSchUtil {
-
- def getSession(prefix: String): Session = {
- val jschConfig: ScpConnection = new ScpConnection(prefix)
- getSession(jschConfig)
- }
-
- def getSession(jschConfig: ScpConnection): Session = {
- getSession(
- jschConfig.getHost,
- jschConfig.getPort,
- jschConfig.getUser,
- jschConfig.getPassword
- )
- }
-
- def getSession(host: String,
- port: Int,
- user: String,
- password: String): Session = {
- val jsch = new JSch
- val session = jsch.getSession(user, host, port)
- session.setConfig("StrictHostKeyChecking", "no")
- if (password != null) session.setPassword(password)
- session.connect()
- session
- }
-
- def connect(session: Session, timeout: Int = 0): Unit = {
- if (!session.isConnected) {
- session.connect(timeout)
- }
- }
-
- def disconnect(session: Session): Unit = {
- session.disconnect()
- }
-
- def openChannelExec(session: Session): ChannelExec = {
- session.openChannel("exec").asInstanceOf[ChannelExec]
- }
-
- def closeChannelExec(channelExec: ChannelExec): Unit = {
- if (channelExec != null) {
- channelExec.disconnect()
- }
- }
-
- def exec(prefix: String, command: String): String = {
- exec(prefix, List(command)).head
- }
-
- def exec(prefix: String, commands: List[String]): List[String] = {
- exec(getSession(prefix), commands)
- }
-
- def exec(jschConfig: ScpConnection, command: String): String = {
- exec(jschConfig, List(command)).head
- }
-
- def exec(jschConfig: ScpConnection, commands: List[String]): List[String] = {
- val session: Session = getSession(jschConfig)
- try {
- exec(session, commands)
- } finally {
- disconnect(session)
- }
- }
-
- def exec(session: Session, command: String): List[String] = {
- exec(session, List(command))
- }
-
- def exec(session: Session, commands: List[String]): List[String] = {
- val ret: ArrayBuffer[String] = ArrayBuffer[String]()
- try {
- connect(session)
- for (command <- commands) {
- val channelExec: ChannelExec = openChannelExec(session)
- channelExec.setPty(true)
- ETLLogger.info("command : " + command)
- channelExec.setCommand(command)
- val inputStream: InputStream = channelExec.getInputStream
- val err: InputStream = channelExec.getErrStream
- channelExec.connect()
- ETLLogger.info("stdout : ")
- var output: String = ""
- val bufferSize = 1024
- val buf: Array[Byte] = new Array[Byte](bufferSize)
- var length: Int = 0
- do {
- length = inputStream.read(buf)
- if (length != -1) {
- output += new String(buf, 0, length)
- ETLLogger.info(new String(buf, 0, length))
- }
- } while (length != -1)
- ETLLogger.error("stderr : " + IOUtils.toString(err))
- ret += StringUtils.chop(output)
- closeChannelExec(channelExec)
- }
- ret.toList
- } catch {
- case e: Exception =>
- ETLLogger.error("Exec commands failed.", e)
- throw e
- } finally {
- disconnect(session)
- }
- }
-
-}
-// $COVERAGE-ON$
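
Callers typically go through the `exec` overloads; note that `exec(session, commands)` disconnects the session in its `finally` block, so no explicit cleanup is needed afterwards. A hypothetical usage (host and credentials are placeholders):

```scala
import com.github.sharpdata.sharpetl.spark.utils.JSchUtil

object JSchSketch {
  def main(args: Array[String]): Unit = {
    val session = JSchUtil.getSession("example.com", 22, "deploy", "secret")
    // runs each command over one SSH session; output is chomped per command
    JSchUtil.exec(session, List("hostname", "ls -1 /data")).foreach(println)
  }
}
```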
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JavaVersionChecker.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JavaVersionChecker.scala
deleted file mode 100644
index 89eb54f..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JavaVersionChecker.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import ETLSparkSession.sparkSession
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-
-import scala.util.control.NoStackTrace
-
-object JavaVersionChecker {
-
- final case class InvalidJavaVersionException(message: String) extends RuntimeException(message) with NoStackTrace
-
- def checkJavaVersion(): Unit = {
- val version = System.getProperty("java.version")
-
- (if (version.contains(".")) version.substring(0, version.indexOf(".")) else version) match {
- case "1" => if (!version.startsWith("1.8")) {
- throw InvalidJavaVersionException(s"Java $version lower than 1.8 is not supported")
- }
- case "9" | "10" => throw InvalidJavaVersionException(s"Non LTS java $version may not supported")
- case "11" => if (sparkSession.version.startsWith("2.")) {
- throw InvalidJavaVersionException(s"Currently using java $version, which may not supported by Apache Spark(<3.0)")
- }
- case _ => ETLLogger.error(s"Java $version higher than 11 may not be supported!")
- }
- }
-}
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JdbcUtil.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JdbcUtil.scala
deleted file mode 100644
index 85bf485..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/JdbcUtil.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import org.apache.spark.internal.Logging
-import org.apache.spark.sql.execution.datasources.jdbc.{DriverRegistry, DriverWrapper, JDBCOptions}
-
-import java.sql.{Connection, Driver, DriverManager}
-import scala.jdk.CollectionConverters._
-
-// scalastyle:off
-object JdbcUtils extends Logging {
- /**
- * Returns a factory for creating connections to the given JDBC URL.
- *
- * @param options - JDBC options that contains url, table and other information.
- */
- def createConnectionFactory(options: JDBCOptions): () => Connection = {
- val driverClass: String = options.driverClass
- () => {
- DriverRegistry.register(driverClass)
- val driver: Driver = DriverManager.getDrivers.asScala.collectFirst {
- case d: DriverWrapper if d.wrapped.getClass.getCanonicalName == driverClass => d
- case d if d.getClass.getCanonicalName == driverClass => d
- }.getOrElse {
- throw new IllegalStateException(
- s"Did not find registered driver with class $driverClass")
- }
- driver.connect(options.url, options.asConnectionProperties)
- }
- }
-}
-// scalastyle:on
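
This mirrors Spark's own connection factory but resolves `DriverWrapper`-wrapped drivers explicitly, which matters when the driver jar was loaded by a different classloader. A hedged usage sketch (`JDBCOptions` is Spark-internal API; the URL, table and driver below are placeholders):

```scala
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import com.github.sharpdata.sharpetl.spark.utils.JdbcUtils

object JdbcConnSketch {
  def main(args: Array[String]): Unit = {
    val options = new JDBCOptions(Map(
      "url" -> "jdbc:mysql://localhost:3306/sharp_etl",
      "dbtable" -> "job_log",
      "driver" -> "com.mysql.cj.jdbc.Driver"
    ))
    val conn = JdbcUtils.createConnectionFactory(options)() // build factory, then connect
    try println(conn.getMetaData.getDatabaseProductName)
    finally conn.close()
  }
}
```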
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ReflectUtil.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ReflectUtil.scala
deleted file mode 100644
index 61d4b3d..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ReflectUtil.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.spark.transformation.Transformer
-import com.github.sharpdata.sharpetl.core.util.WorkflowReader.readLines
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.TransformerType
-import com.github.sharpdata.sharpetl.core.util.ReflectUtil.reflectObjectMethod
-import com.github.sharpdata.sharpetl.core.util.ScalaScriptCompiler
-import org.apache.spark.sql.DataFrame
-
-import scala.language.reflectiveCalls
-
-object ReflectUtil {
-
- def reflectClassMethod(className: String, args: Any*): Any = {
- val c = Class.forName(className)
- .newInstance
- // scalastyle:off
- .asInstanceOf[ {def entrypoint(config: Any): DataFrame}]
- // scalastyle:on
- c.entrypoint(args)
- }
-
- def compileAndCallObjectMethod(className: String, methodName: String, args: Any*): Any = {
- val objectName = className.substring(className.lastIndexOf(".") + 1)
- val transformer = ScalaScriptCompiler.compileTransformer(readLines(objectName).mkString("\n"))
- .asInstanceOf[Transformer]
- if (args.size == 1) {
- transformer.transform(args.head.asInstanceOf[Map[String, String]])
- } else {
- transformer.transform(args(0).asInstanceOf[DataFrame], args(1).asInstanceOf[WorkflowStep], args(2).asInstanceOf[Variables])
- }
- }
-
- def execute(className: String,
- methodName: String,
- transformerType: String,
- args: Any*): Any = {
- transformerType match {
- case TransformerType.OBJECT_TYPE => reflectObjectMethod(className, methodName, args: _*)
- case TransformerType.CLASS_TYPE => reflectClassMethod(className, args: _*)
- case TransformerType.DYNAMIC_OBJECT_TYPE => compileAndCallObjectMethod(className, methodName, args: _*)
- case _ => throw new RuntimeException(s"Unknown transformer type: $transformerType")
- }
- }
-}
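
`execute` dispatches on the step's transformerType: precompiled objects go through reflection, classes are instantiated, and dynamic objects are compiled from script source first. A hypothetical invocation of the object case (the args map is a placeholder, not a working configuration):

```scala
import com.github.sharpdata.sharpetl.core.util.Constants.TransformerType
import com.github.sharpdata.sharpetl.spark.utils.ReflectUtil

object ReflectSketch {
  def main(args: Array[String]): Unit = {
    val result = ReflectUtil.execute(
      "com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer",
      "transform",
      TransformerType.OBJECT_TYPE,
      Map("odsViewName" -> "ods_view") // illustrative args only
    )
    println(result)
  }
}
```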
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ShutdownHookManager.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ShutdownHookManager.scala
deleted file mode 100644
index 2e0fc87..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/ShutdownHookManager.scala
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.github.sharpdata.sharpetl.spark.utils
-
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import ShutdownHookManager.logUncaughtExceptions
-import org.apache.hadoop.fs.FileSystem
-import org.apache.spark.internal.Logging
-import org.apache.spark.network.util.JavaUtils
-
-import java.io.File
-import java.util.PriorityQueue
-import scala.util.Try
-import scala.util.control.ControlThrowable
-
-/**
- * Taken from [[org.apache.spark.util.ShutdownHookManager]]
- */
-// $COVERAGE-OFF$
-// scalastyle:off
-object ShutdownHookManager extends Logging {
- val DEFAULT_SHUTDOWN_PRIORITY = 100
-
- /**
- * The shutdown priority of the SparkContext instance. This is lower than the default
- * priority, so that by default hooks are run before the context is shut down.
- */
- val SPARK_CONTEXT_SHUTDOWN_PRIORITY = 50
-
- /**
- * The shutdown priority of temp directory must be lower than the SparkContext shutdown
- * priority. Otherwise cleaning the temp directories while Spark jobs are running can
- * throw undesirable errors at the time of shutdown.
- */
- val TEMP_DIR_SHUTDOWN_PRIORITY = 25
-
- private lazy val shutdownHooks = {
- val manager = new SparkShutdownHookManager()
- manager.install()
- manager
- }
-
- private val shutdownDeletePaths = new scala.collection.mutable.HashSet[String]()
-
- // Add a shutdown hook to delete the temp dirs when the JVM exits
- ETLLogger.info("Adding shutdown hook") // force eager creation of logger
- addShutdownHook(TEMP_DIR_SHUTDOWN_PRIORITY) { () =>
- ETLLogger.info("Shutdown hook called")
- // we need to materialize the paths to delete because deleteRecursively removes items from
- // shutdownDeletePaths as we are traversing through it.
- shutdownDeletePaths.toArray.foreach { dirPath =>
- try {
- ETLLogger.info("Deleting directory " + dirPath)
- deleteRecursively(new File(dirPath))
- } catch {
- case e: Exception => ETLLogger.error(s"Exception while deleting Spark temp dir: $dirPath", e)
- }
- }
- }
-
- // Register the path to be deleted via shutdown hook
- def registerShutdownDeleteDir(file: File) {
- val absolutePath = file.getAbsolutePath()
- shutdownDeletePaths.synchronized {
- shutdownDeletePaths += absolutePath
- }
- }
-
- // Remove the path to be deleted via shutdown hook
- def removeShutdownDeleteDir(file: File) {
- val absolutePath = file.getAbsolutePath()
- shutdownDeletePaths.synchronized {
- shutdownDeletePaths.remove(absolutePath)
- }
- }
-
- // Is the path already registered to be deleted via a shutdown hook?
- def hasShutdownDeleteDir(file: File): Boolean = {
- val absolutePath = file.getAbsolutePath()
- shutdownDeletePaths.synchronized {
- shutdownDeletePaths.contains(absolutePath)
- }
- }
-
- // Note: if file is child of some registered path, while not equal to it, then return true;
- // else false. This is to ensure that two shutdown hooks do not try to delete each others
- // paths - resulting in IOException and incomplete cleanup.
- def hasRootAsShutdownDeleteDir(file: File): Boolean = {
- val absolutePath = file.getAbsolutePath()
- val retval = shutdownDeletePaths.synchronized {
- shutdownDeletePaths.exists { path =>
- !absolutePath.equals(path) && absolutePath.startsWith(path)
- }
- }
- if (retval) {
- ETLLogger.info("path = " + file + ", already present as root for deletion.")
- }
- retval
- }
-
- /**
- * Detect whether this thread might be executing a shutdown hook. Will always return true if
- * the current thread is running a shutdown hook but may spuriously return true otherwise (e.g.
- * if System.exit was just called by a concurrent thread).
- *
- * Currently, this detects whether the JVM is shutting down by Runtime#addShutdownHook throwing
- * an IllegalStateException.
- */
- def inShutdown(): Boolean = {
- try {
- val hook = new Thread {
- override def run() {}
- }
- // scalastyle:off runtimeaddshutdownhook
- Runtime.getRuntime.addShutdownHook(hook)
- // scalastyle:on runtimeaddshutdownhook
- Runtime.getRuntime.removeShutdownHook(hook)
- } catch {
- case ise: IllegalStateException => return true
- }
- false
- }
-
- /**
- * Adds a shutdown hook with default priority.
- *
- * @param hook The code to run during shutdown.
- * @return A handle that can be used to unregister the shutdown hook.
- */
- def addShutdownHook(hook: () => Unit): AnyRef = {
- addShutdownHook(DEFAULT_SHUTDOWN_PRIORITY)(hook)
- }
-
- /**
- * Adds a shutdown hook with the given priority. Hooks with higher priority values run
- * first.
- *
- * @param hook The code to run during shutdown.
- * @return A handle that can be used to unregister the shutdown hook.
- */
- def addShutdownHook(priority: Int)(hook: () => Unit): AnyRef = {
- shutdownHooks.add(priority, hook)
- }
-
- /**
- * Remove a previously installed shutdown hook.
- *
- * @param ref A handle returned by `addShutdownHook`.
- * @return Whether the hook was removed.
- */
- def removeShutdownHook(ref: AnyRef): Boolean = {
- shutdownHooks.remove(ref)
- }
-
-
- def deleteRecursively(file: File): Unit = {
- if (file != null) {
- JavaUtils.deleteRecursively(file)
- ShutdownHookManager.removeShutdownDeleteDir(file)
- }
- }
-
- def logUncaughtExceptions[T](f: => T): T = {
- try {
- f
- } catch {
- case ct: ControlThrowable =>
- throw ct
- case t: Throwable =>
- ETLLogger.error(s"Uncaught exception in thread ${Thread.currentThread().getName}", t)
- throw t
- }
- }
-
-}
-
-class SparkShutdownHookManager {
-
- private val hooks = new PriorityQueue[SparkShutdownHook]()
- @volatile private var shuttingDown = false
-
- /**
- * Install a hook to run at shutdown and run all registered hooks in order.
- */
- def install(): Unit = {
- val hookTask = new Runnable() {
- override def run(): Unit = runAll()
- }
- org.apache.hadoop.util.ShutdownHookManager.get().addShutdownHook(
- hookTask, FileSystem.SHUTDOWN_HOOK_PRIORITY + 30)
- }
-
- def runAll(): Unit = {
- shuttingDown = true
- var nextHook: SparkShutdownHook = null
- while ( {
- nextHook = hooks.synchronized {
- hooks.poll()
- };
- nextHook != null
- }) {
- Try(logUncaughtExceptions(nextHook.run()))
- }
- }
-
- def add(priority: Int, hook: () => Unit): AnyRef = {
- hooks.synchronized {
- if (shuttingDown) {
- throw new IllegalStateException("Shutdown hooks cannot be modified during shutdown.")
- }
- val hookRef = new SparkShutdownHook(priority, hook)
- hooks.add(hookRef)
- hookRef
- }
- }
-
- def remove(ref: AnyRef): Boolean = {
- hooks.synchronized {
- hooks.remove(ref)
- }
- }
-
-}
-
-class SparkShutdownHook(private val priority: Int, hook: () => Unit)
- extends Comparable[SparkShutdownHook] {
-
- override def compareTo(other: SparkShutdownHook): Int = other.priority.compareTo(priority)
-
- def run(): Unit = hook()
-
-}
-// scalastyle:on
-// $COVERAGE-ON$
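
Hooks run in descending priority order, which is why temp-dir deletion sits at priority 25: anything registered at the SparkContext priority (50) or the default (100) finishes first. A small sketch of registering and unregistering a hook with this manager:

```scala
import com.github.sharpdata.sharpetl.spark.utils.ShutdownHookManager

object HookSketch {
  def main(args: Array[String]): Unit = {
    val ref = ShutdownHookManager.addShutdownHook(
      ShutdownHookManager.SPARK_CONTEXT_SHUTDOWN_PRIORITY) { () =>
      println("flushing state before temp dirs are deleted")
    }
    // the handle can cancel the hook again if the cleanup becomes unnecessary
    ShutdownHookManager.removeShutdownHook(ref)
  }
}
```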
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/SparkCatalogUtil.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/SparkCatalogUtil.scala
deleted file mode 100644
index c9207c4..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/SparkCatalogUtil.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import ETLSparkSession.sparkSession
-import org.apache.spark.sql.functions._
-import com.github.sharpdata.sharpetl.datasource.kafka.DFConversations._
-
-object SparkCatalogUtil {
- def getPartitionColNames(dbName: String, tableName: String): Array[String] = {
- sparkSession.catalog.listColumns(dbName, tableName)
- .where(col("ispartition") === true)
- .select("name")
- .collect()
- .map(_.getAs[String]("name"))
- }
-
- def getNonePartitionColNames(dbName: String, tableName: String): Array[String] = {
- sparkSession.catalog.listColumns(dbName, tableName)
- .where(col("ispartition") === false)
- .select("name")
- .collect()
- .map(_.getAs[String]("name"))
- }
-
- def getAllColNames(dbName: String, tableName: String): Array[String] = {
- Array.concat(
- getNonePartitionColNames(dbName, tableName),
- getPartitionColNames(dbName, tableName)
- )
- }
-
- def getAllColNamesOfTempTable(tableName: String): Array[String] = {
- sparkSession.catalog.listColumns(tableName)
- .select("name")
- .collect()
- .map(_.getAs[String]("name"))
- }
-
- def isPartitionDataExists(tableName: String, partitionQuery: String): Boolean = {
- !sparkSession.sql(s"select * from $tableName where $partitionQuery limit 1").isEmpty
- }
-}
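
`getAllColNames` deliberately appends the partition columns after the data columns, matching the column order Hive expects when writing into partitioned tables. Hypothetical usage (database, table and partition names are placeholders):

```scala
import com.github.sharpdata.sharpetl.spark.utils.SparkCatalogUtil

object CatalogSketch {
  def main(args: Array[String]): Unit = {
    val cols = SparkCatalogUtil.getAllColNames("dw", "dim_user")
    println(cols.mkString(", ")) // partition columns come last
    println(SparkCatalogUtil.isPartitionDataExists("dw.dim_user", "year = '2022'"))
  }
}
```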
diff --git a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/VariablesUtil.scala b/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/VariablesUtil.scala
deleted file mode 100644
index 0923cc7..0000000
--- a/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/utils/VariablesUtil.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.utils
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import org.apache.spark.sql.DataFrame
-
-object VariablesUtil {
-
- def setVariables(
- df: DataFrame,
- variables: Variables): Unit = {
- if (!df.rdd.isEmpty) {
- val fieldNames = df.schema.fieldNames
- val row = df.first()
- fieldNames.foreach(fieldName => {
- val index = row.fieldIndex(fieldName)
- val fieldValue = if (row.isNullAt(index)) {
- "null"
- } else {
- row.get(index).toString
- }
- val key = if (fieldName.matches("^#\\{.+\\}$")) {
- fieldName
- } else {
- String.format("${%s}", fieldName)
- }
- variables += key -> fieldValue
- })
- }
- ETLLogger.info(s"Variables: $variables")
- }
-
-}
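
The key-naming rule is the subtle part: a column already written as `#{name}` is kept verbatim, anything else is wrapped as `${name}` before being added to the variables map. The rule in isolation:

```scala
object VariableKeySketch {
  def main(args: Array[String]): Unit = {
    Seq("job_date", "#{job_date}").foreach { fieldName =>
      val key =
        if (fieldName.matches("^#\\{.+\\}$")) fieldName // already a #{...} key
        else String.format("${%s}", fieldName)          // wrap as ${...}
      println(s"$fieldName -> $key")
    }
  }
}
```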
diff --git a/spark/src/main/spark_2.3_scala_211/com/github/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala b/spark/src/main/spark_2.3_scala_211/com/github/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala
deleted file mode 100644
index a60cddd..0000000
--- a/spark/src/main/spark_2.3_scala_211/com/github/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasoruce.jdbc
-
-import org.apache.spark.TaskContext
-import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-
-object AbstractJdbcDataSource {
- def addTaskCompletionListener(close: () => Unit): TaskContext = {
- TaskContext.get().addTaskCompletionListener { context: TaskContext => close() }
- }
-}
diff --git a/spark/src/main/spark_2.3_scala_211/com/github/sharpdata/sharpetl/spark/extension/UDFExtension.scala b/spark/src/main/spark_2.3_scala_211/com/github/sharpdata/sharpetl/spark/extension/UDFExtension.scala
deleted file mode 100644
index 7ff72ca..0000000
--- a/spark/src/main/spark_2.3_scala_211/com/github/sharpdata/sharpetl/spark/extension/UDFExtension.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extension
-
-import com.github.sharpdata.sharpetl.spark.datasource.UDFConfigExtension
-import com.github.sharpdata.sharpetl.spark.datasource.UDFConfigExtension.generateFunction
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, ScalaReflection}
-import org.apache.spark.sql.catalyst.expressions.{Expression, ScalaUDF}
-import org.apache.spark.sql.types.DataType
-
-import scala.util.Try
-
-object UDFExtension {
-
- def registerUDF(
- spark: SparkSession,
- classType: String,
- name: String,
- className: String,
- methodName: String,
- args: Any*): Unit = {
- val (fun, parameterTypes, returnType) = getFunctionInfo(
- classType,
- className,
- methodName,
- args: _*
- )
-
- def builder(e: Seq[Expression]) = ScalaUDF(
- function = fun,
- dataType = returnType,
- children = e,
- inputTypes = if (parameterTypes.contains(None)) Nil else parameterTypes.map(_.get),
- udfName = Some(name)
- )
-
- val functionIdentifier = new FunctionIdentifier(name)
- spark.sessionState.functionRegistry.registerFunction(functionIdentifier, builder)
- }
-
- def getFunctionInfo(
- classType: String,
- className: String,
- methodName: String,
- args: Any*): (AnyRef, List[Option[DataType]], DataType) = {
- val classInfo = UDFConfigExtension.apply(classType, className, args: _*)
- val methodSymbol = classInfo.methodSymbols(methodName)
- // currying is not considered when generating the UDF via reflection here
- val params = methodSymbol
- .paramLists
- .head
- val parameterTypes = params
- .map(param => Try(ScalaReflection.schemaFor(param.typeSignature).dataType).toOption)
- val returnType = ScalaReflection.schemaFor(methodSymbol.returnType).dataType
- val fun = generateFunction(
- classType,
- className,
- Some(methodName),
- parameterTypes.length,
- args: _*
- )
- (fun, parameterTypes, returnType)
- }
-
-}
diff --git a/spark/src/main/spark_2.4_scala_211/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala b/spark/src/main/spark_2.4_scala_211/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala
deleted file mode 100644
index dec52ce..0000000
--- a/spark/src/main/spark_2.4_scala_211/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasoruce.jdbc
-
-import org.apache.spark.TaskContext
-import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-
-object AbstractJdbcDataSource {
- def addTaskCompletionListener(close: () => Unit): TaskContext = {
- TaskContext.get().addTaskCompletionListener[Unit] { context: TaskContext => close() }
- }
-}
diff --git a/spark/src/main/spark_2.4_scala_211/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala b/spark/src/main/spark_2.4_scala_211/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala
deleted file mode 100644
index ced6d5d..0000000
--- a/spark/src/main/spark_2.4_scala_211/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extension
-
-import com.github.sharpdata.sharpetl.spark.datasource.UDFConfigExtension
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, ScalaReflection}
-import org.apache.spark.sql.catalyst.expressions.{Expression, ScalaUDF}
-import org.apache.spark.sql.types.DataType
-
-import scala.util.Try
-
-object UDFExtension {
-
- def registerUDF(
- spark: SparkSession,
- classType: String,
- name: String,
- className: String,
- methodName: String,
- args: Any*): Unit = {
- val (fun, parameterTypes, returnType) = getFunctionInfo(
- classType,
- className,
- methodName,
- args: _*
- )
-
- def builder(e: Seq[Expression]) = ScalaUDF(
- function = fun,
- dataType = returnType,
- children = e,
- inputsNullSafe = Nil, // set Nil so the UDF can process null values
- inputTypes = if (parameterTypes.contains(None)) Nil else parameterTypes.map(_.get),
- udfName = Some(name)
- )
-
- val functionIdentifier = new FunctionIdentifier(name)
- spark.sessionState.functionRegistry.registerFunction(functionIdentifier, builder)
- }
-
- def getFunctionInfo(
- classType: String,
- className: String,
- methodName: String,
- args: Any*): (AnyRef, List[Option[DataType]], DataType) = {
- val classInfo = UDFConfigExtension.apply(classType, className, args: _*)
- val methodSymbol = classInfo.methodSymbols(methodName)
- // currying is not considered when generating the UDF via reflection here
- val params = methodSymbol
- .paramLists
- .head
- val parameterTypes = params
- .map(param => Try(ScalaReflection.schemaFor(param.typeSignature).dataType).toOption)
- val returnType = ScalaReflection.schemaFor(methodSymbol.returnType).dataType
- val fun = UDFConfigExtension.generateFunction(
- classType,
- className,
- Some(methodName),
- parameterTypes.length,
- args: _*
- )
- (fun, parameterTypes, returnType)
- }
-
-}
diff --git a/spark/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/spark/datasoruce/jdbc/JdbcDataSource.scala b/spark/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/spark/datasoruce/jdbc/JdbcDataSource.scala
deleted file mode 100644
index 8f2771f..0000000
--- a/spark/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/spark/datasoruce/jdbc/JdbcDataSource.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasoruce.jdbc
-
-import org.apache.spark.TaskContext
-import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-
-object AbstractJdbcDataSource {
- def addTaskCompletionListener(close: () => Unit): TaskContext = {
- TaskContext.get().addTaskCompletionListener[Unit] { context: TaskContext => close() }
- }
-
-}
diff --git a/spark/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/spark/extension/UDFExtension.scala b/spark/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/spark/extension/UDFExtension.scala
deleted file mode 100644
index a428254..0000000
--- a/spark/src/main/spark_2.4_scala_212/com/github/sharpdata/sharpetl/spark/extension/UDFExtension.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extension
-
-import com.github.sharpdata.sharpetl.spark.datasource.UDFConfigExtension
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, ScalaReflection}
-import org.apache.spark.sql.catalyst.expressions.{Expression, ScalaUDF}
-import org.apache.spark.sql.types.DataType
-
-import scala.util.Try
-
-object UDFExtension extends Serializable {
-
- def registerUDF(
- spark: SparkSession,
- classType: String,
- name: String,
- className: String,
- methodName: String,
- args: Any*): Unit = {
- val (fun, parameterTypes, returnType) = getFunctionInfo(
- classType,
- className,
- methodName,
- args: _*
- )
-
- def builder(e: Seq[Expression]) = ScalaUDF(
- function = fun,
- dataType = returnType,
- children = e,
- inputsNullSafe = Nil, // set Nil so the UDF can process null values
- inputTypes = if (parameterTypes.contains(None)) Nil else parameterTypes.map(_.get),
- udfName = Some(name)
- )
-
- val functionIdentifier = new FunctionIdentifier(name)
- spark.sessionState.functionRegistry.registerFunction(functionIdentifier, builder)
- }
-
- def getFunctionInfo(
- classType: String,
- className: String,
- methodName: String,
- args: Any*): (AnyRef, List[Option[DataType]], DataType) = {
- val classInfo = UDFConfigExtension.apply(classType, className, args: _*)
- val methodSymbol = classInfo.methodSymbols(methodName)
- // currying is not considered when generating the UDF via reflection here
- val params = methodSymbol
- .paramLists
- .head
- val parameterTypes = params
- .map(param => Try(ScalaReflection.schemaFor(param.typeSignature).dataType).toOption)
- val returnType = ScalaReflection.schemaFor(methodSymbol.returnType).dataType
- val fun = UDFConfigExtension.generateFunction(
- classType,
- className,
- Some(methodName),
- parameterTypes.length,
- args: _*
- )
- (fun, parameterTypes, returnType)
- }
-
-}
diff --git a/spark/src/main/spark_3.1_scala_212/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala b/spark/src/main/spark_3.1_scala_212/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala
deleted file mode 100644
index dec52ce..0000000
--- a/spark/src/main/spark_3.1_scala_212/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasoruce.jdbc
-
-import org.apache.spark.TaskContext
-import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-
-object AbstractJdbcDataSource {
- def addTaskCompletionListener(close: () => Unit): TaskContext = {
- TaskContext.get().addTaskCompletionListener[Unit] { context: TaskContext => close() }
- }
-}
diff --git a/spark/src/main/spark_3.1_scala_212/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala b/spark/src/main/spark_3.1_scala_212/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala
deleted file mode 100644
index 4d56601..0000000
--- a/spark/src/main/spark_3.1_scala_212/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extension
-
-import com.github.sharpdata.sharpetl.spark.datasource.UDFConfigExtension
-import com.github.sharpdata.sharpetl.spark.datasource.UDFConfigExtension.generateFunction
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, RowEncoder}
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, ScalaReflection}
-import org.apache.spark.sql.catalyst.expressions.{Expression, ScalaUDF}
-import org.apache.spark.sql.types.{DataType, StructField, StructType}
-
-import scala.util.Try
-
-object UDFExtension {
-
- def registerUDF(
- spark: SparkSession,
- classType: String,
- name: String,
- className: String,
- methodName: String,
- args: Any*): Unit = {
- val (fun, inputEncoders, returnType) = getFunctionInfo(
- classType,
- className,
- methodName,
- args: _*
- )
-
- def builder(e: Seq[Expression]) = ScalaUDF(
- function = fun,
- dataType = returnType,
- children = e,
- inputEncoders = if (inputEncoders.contains(None)) Nil else inputEncoders,
- udfName = Some(name)
- )
-
- val functionIdentifier = new FunctionIdentifier(name)
- spark.sessionState.functionRegistry.registerFunction(functionIdentifier, builder)
- }
-
- def getFunctionInfo(
- classType: String,
- className: String,
- methodName: String,
- args: Any*): (AnyRef, Seq[Option[ExpressionEncoder[_]]], DataType) = {
- val classInfo = UDFConfigExtension.apply(classType, className, args: _*)
- val methodSymbol = classInfo.methodSymbols(methodName)
- // currying is not considered when generating the UDF via reflection here
- val params = methodSymbol
- .paramLists
- .head
- val parameterTypes = params
- .map(param => Try(ScalaReflection.schemaFor(param.typeSignature).dataType).toOption)
-
- val schema = StructType(params.map(param => {
- StructField(param.fullName, ScalaReflection.schemaFor(param.typeSignature).dataType)
- }))
-
- //TODO: this may not work
- val inputEncoders = Seq(Option(RowEncoder.apply(schema).resolveAndBind()))
-
- val returnType = ScalaReflection.schemaFor(methodSymbol.returnType).dataType
- val fun = generateFunction(
- classType,
- className,
- Some(methodName),
- parameterTypes.length,
- args: _*
- )
- (fun, Nil, returnType)
- }
-
-
-}
diff --git a/spark/src/main/spark_3.2_scala_212/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala b/spark/src/main/spark_3.2_scala_212/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala
deleted file mode 100644
index dec52ce..0000000
--- a/spark/src/main/spark_3.2_scala_212/com/sharpdata/sharpetl/spark/datasource/jdbc/JdbcDataSource.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasoruce.jdbc
-
-import org.apache.spark.TaskContext
-import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-
-object AbstractJdbcDataSource {
- def addTaskCompletionListener(close: () => Unit): TaskContext = {
- TaskContext.get().addTaskCompletionListener[Unit] { context: TaskContext => close() }
- }
-}
diff --git a/spark/src/main/spark_3.2_scala_212/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala b/spark/src/main/spark_3.2_scala_212/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala
deleted file mode 100644
index 9947be0..0000000
--- a/spark/src/main/spark_3.2_scala_212/com/sharpdata/sharpetl/spark/extension/UDFExtension.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.extension
-
-import com.github.sharpdata.sharpetl.spark.datasource.UDFConfigExtension
-import com.github.sharpdata.sharpetl.spark.datasource.UDFConfigExtension.generateFunction
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, RowEncoder}
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, ScalaReflection}
-import org.apache.spark.sql.catalyst.expressions.{Expression, ScalaUDF}
-import org.apache.spark.sql.types.{DataType, StructField, StructType}
-
-import scala.util.Try
-
-object UDFExtension {
-
- def registerUDF(
- spark: SparkSession,
- classType: String,
- name: String,
- className: String,
- methodName: String,
- args: Any*): Unit = {
- val (fun, inputEncoders, returnType) = getFunctionInfo(
- classType,
- className,
- methodName,
- args: _*
- )
-
- def builder(e: Seq[Expression]) = ScalaUDF(
- function = fun,
- dataType = returnType,
- children = e,
- inputEncoders = if (inputEncoders.contains(None)) Nil else inputEncoders,
- udfName = Some(name)
- )
-
- val functionIdentifier = new FunctionIdentifier(name)
- spark.sessionState.functionRegistry.registerFunction(functionIdentifier, builder _, "scala_udf")
- }
-
- def getFunctionInfo(
- classType: String,
- className: String,
- methodName: String,
- args: Any*): (AnyRef, Seq[Option[ExpressionEncoder[_]]], DataType) = {
- val classInfo = UDFConfigExtension.apply(classType, className, args: _*)
- val methodSymbol = classInfo.methodSymbols(methodName)
- // currying is not considered when generating the UDF via reflection here
- val params = methodSymbol
- .paramLists
- .head
- val parameterTypes = params
- .map(param => Try(ScalaReflection.schemaFor(param.typeSignature).dataType).toOption)
-
- val schema = StructType(params.map(param => {
- StructField(param.fullName, ScalaReflection.schemaFor(param.typeSignature).dataType)
- }))
-
- //TODO: this may not work
- //val inputEncoders = Seq(Option(RowEncoder.apply(schema).resolveAndBind()))
-
- val returnType = ScalaReflection.schemaFor(methodSymbol.returnType).dataType
- val fun = generateFunction(
- classType,
- className,
- Some(methodName),
- parameterTypes.length,
- args: _*
- )
- (fun, Nil, returnType)
- }
-
-
-}
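
`getFunctionInfo` leans on Catalyst's runtime reflection to map Scala method signatures to SQL types. A self-contained sketch of that mapping; `Target` is a hypothetical class, not part of this repo.

```scala
import org.apache.spark.sql.catalyst.ScalaReflection
import scala.reflect.runtime.{universe => ru}

object ReflectionSketch extends App {
  class Target { def score(name: String, weight: Double): Double = weight }

  val mirror = ru.runtimeMirror(getClass.getClassLoader)
  val method = mirror.classSymbol(classOf[Target]).toType
    .decl(ru.TermName("score")).asMethod

  // Only the first parameter list is considered -- currying is ignored,
  // exactly as in the deleted code above.
  val paramTypes = method.paramLists.head
    .map(p => ScalaReflection.schemaFor(p.typeSignature).dataType)
  val returnType = ScalaReflection.schemaFor(method.returnType).dataType

  println(paramTypes) // List(StringType, DoubleType)
  println(returnType) // DoubleType
}
```
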
diff --git a/spark/src/test/resources/application-delta.properties b/spark/src/test/resources/application-delta.properties
deleted file mode 100644
index a59e498..0000000
--- a/spark/src/test/resources/application-delta.properties
+++ /dev/null
@@ -1,38 +0,0 @@
-etl.workflow.path=tasks
-etl.default.delta.base.path=/tmp/delta
-
-encrypt.algorithm=PBEWithMD5AndDES
-encrypt.password=cGFzc3dvcmQ=
-
-spark.default.spark.sql.catalogImplementation=hive
-spark.default.spark.sql.legacy.createHiveTableByDefault=false
-spark.default.spark.sql.hive.convertCTAS=true
-spark.default.spark.sql.cbo.enabled=true
-spark.default.spark.sql.adaptive.enabled=true
-spark.default.spark.sql.adaptive.logLevel=info
-spark.default.spark.sql.adaptive.advisoryPartitionSizeInBytes=128m
-spark.default.spark.sql.adaptive.coalescePartitions.enabled=true
-spark.default.spark.sql.adaptive.coalescePartitions.minPartitionNum=1
-spark.default.spark.sql.adaptive.fetchShuffleBlocksInBatch=true
-spark.default.spark.sql.adaptive.localShuffleReader.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionFactor=5
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes=400m
-spark.default.spark.sql.adaptive.nonEmptyPartitionRatioForBroadcastJoin=0.2
-spark.default.spark.sql.autoBroadcastJoinThreshold=-1
-spark.default.spark.sql.adaptive.shuffle.targetPostShuffleInputSize=134217728
-spark.default.hive.exec.dynamic.partition=true
-spark.default.hive.exec.dynamic.partition.mode=nonstrict
-spark.default.spark.sql.sources.partitionOverwriteMode=dynamic
-
-flyway.driver=com.github.sharpdata.sharpetl.spark.extra.driver.SparkJdbcDriver
-flyway.url=jdbc:spark_sharp_etl://localhost/sharp_etl
-flyway.username=admin
-flyway.password=admin
-
-
-etl.default.jobId.column=job_id
-etl.default.jobTime.column=job_time
-
-from_file_path=true
-
diff --git a/spark/src/test/resources/application-test.properties b/spark/src/test/resources/application-test.properties
deleted file mode 100644
index fb45a89..0000000
--- a/spark/src/test/resources/application-test.properties
+++ /dev/null
@@ -1,50 +0,0 @@
-etl.workflow.path=tasks
-etl.default.delta.base.path=/tmp/delta
-
-encrypt.algorithm=PBEWithMD5AndDES
-encrypt.password=cGFzc3dvcmQ=
-
-spark.default.spark.sql.cbo.enabled=true
-spark.default.spark.sql.adaptive.enabled=true
-spark.default.spark.sql.adaptive.logLevel=info
-spark.default.spark.sql.adaptive.advisoryPartitionSizeInBytes=128m
-spark.default.spark.sql.adaptive.coalescePartitions.enabled=true
-spark.default.spark.sql.adaptive.coalescePartitions.minPartitionNum=1
-spark.default.spark.sql.adaptive.fetchShuffleBlocksInBatch=true
-spark.default.spark.sql.adaptive.localShuffleReader.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionFactor=5
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes=400m
-spark.default.spark.sql.adaptive.nonEmptyPartitionRatioForBroadcastJoin=0.2
-spark.default.spark.sql.autoBroadcastJoinThreshold=-1
-spark.default.spark.sql.adaptive.shuffle.targetPostShuffleInputSize=134217728
-spark.default.hive.exec.dynamic.partition=true
-spark.default.hive.exec.dynamic.partition.mode=nonstrict
-spark.default.spark.sql.sources.partitionOverwriteMode=dynamic
-
-flyway.driver=com.mysql.cj.jdbc.Driver
-flyway.url=jdbc:mysql://localhost:2333/sharp_etl?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&autoReconnect=true
-flyway.username=admin
-flyway.password=ENC(8CW2DkIPNyDX+d/p1/fXyw==)
-
-int_test.mysql.url=jdbc:mysql://localhost:2334/int_test
-int_test.mysql.driver=com.mysql.cj.jdbc.Driver
-int_test.mysql.user=admin
-int_test.mysql.password=ENC(AUmnp9GPe51M6fN4ExVc+A==)
-int_test.mysql.fetchsize=1000
-
-postgres.postgres.url=jdbc:postgresql://localhost:5432/postgres?stringtype=unspecified
-postgres.postgres.user=postgres
-postgres.postgres.password=postgres
-postgres.postgres.driver=org.postgresql.Driver
-postgres.postgres.fetchsize=10
-
-etl.default.jobId.column=job_id
-etl.default.jobTime.column=job_time
-
-workday.http.header.Authorization=Basic 123456
-workday.http.proxy.host=localhost
-workday.http.proxy.port=8080
-
-from_file_path=true
-
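
The ENC(...) values above are decrypted at load time with the configured PBE algorithm and the Base64-encoded encrypt.password. A sketch of that resolution using Jasypt — the library is an assumption inferred from the PBEWithMD5AndDES default and the ENC() wrapper; the diff itself does not show the decryption code.

```scala
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor

val encryptor = new StandardPBEStringEncryptor()
encryptor.setAlgorithm("PBEWithMD5AndDES")
// encrypt.password is Base64 in the properties file ("cGFzc3dvcmQ=" -> "password")
encryptor.setPassword(new String(java.util.Base64.getDecoder.decode("cGFzc3dvcmQ="), "UTF-8"))

val raw = "ENC(8CW2DkIPNyDX+d/p1/fXyw==)"
val plain = encryptor.decrypt(raw.stripPrefix("ENC(").stripSuffix(")"))
```
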
diff --git a/spark/src/test/resources/application.properties b/spark/src/test/resources/application.properties
deleted file mode 100644
index 239e91e..0000000
--- a/spark/src/test/resources/application.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-etl.workflow.path=tasks
-etl.default.delta.base.path=/tmp/delta
-
-encrypt.algorithm=PBEWithMD5AndDES
-encrypt.password=cGFzc3dvcmQ=
-
-spark.default.spark.sql.cbo.enabled=true
-spark.default.spark.sql.adaptive.enabled=true
-spark.default.spark.sql.adaptive.logLevel=info
-spark.default.spark.sql.adaptive.advisoryPartitionSizeInBytes=128m
-spark.default.spark.sql.adaptive.coalescePartitions.enabled=true
-spark.default.spark.sql.adaptive.coalescePartitions.minPartitionNum=1
-spark.default.spark.sql.adaptive.fetchShuffleBlocksInBatch=true
-spark.default.spark.sql.adaptive.localShuffleReader.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.enabled=true
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionFactor=5
-spark.default.spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes=400m
-spark.default.spark.sql.adaptive.nonEmptyPartitionRatioForBroadcastJoin=0.2
-spark.default.spark.sql.autoBroadcastJoinThreshold=-1
-spark.default.spark.sql.adaptive.shuffle.targetPostShuffleInputSize=134217728
-spark.default.hive.exec.dynamic.partition=true
-spark.default.hive.exec.dynamic.partition.mode=nonstrict
-spark.default.spark.sql.sources.partitionOverwriteMode=dynamic
-
-psi.postgres.driver=org.postgresql.Driver
-psi.postgres.fetchsize=10
-psi.postgres.url=jdbc:postgresql://localhost:5432/default?currentSchema="test"
-psi.postgres.user=root
-psi.postgres.password=root
-
-kudu.table.prefix=impala::
-
-flyway.driver=com.mysql.cj.jdbc.Driver
-flyway.url=jdbc:mysql://localhost:2333/sharp_etl?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&autoReconnect=true
-flyway.username=admin
-flyway.password=ENC(8CW2DkIPNyDX+d/p1/fXyw==)
-
-int_test.mysql.url=jdbc:mysql://localhost:2334/int_test
-int_test.mysql.driver=com.mysql.cj.jdbc.Driver
-int_test.mysql.user=admin
-int_test.mysql.password=ENC(AUmnp9GPe51M6fN4ExVc+A==)
-int_test.mysql.fetchsize=1000
\ No newline at end of file
diff --git a/spark/src/test/resources/data.tar.gz b/spark/src/test/resources/data.tar.gz
deleted file mode 100644
index 8cf8c70..0000000
Binary files a/spark/src/test/resources/data.tar.gz and /dev/null differ
diff --git a/spark/src/test/resources/data2.tar.gz b/spark/src/test/resources/data2.tar.gz
deleted file mode 100644
index 8cf8c70..0000000
Binary files a/spark/src/test/resources/data2.tar.gz and /dev/null differ
diff --git a/spark/src/test/resources/etl.key b/spark/src/test/resources/etl.key
deleted file mode 100644
index aefe06c..0000000
Binary files a/spark/src/test/resources/etl.key and /dev/null differ
diff --git a/spark/src/test/resources/log4j.properties b/spark/src/test/resources/log4j.properties
deleted file mode 100644
index 3b3ca92..0000000
--- a/spark/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,47 +0,0 @@
-# dedicated output for ETLLogger
-log4j.logger.ETLLogger=INFO, console, ETLLogger, infoRollingFile
-log4j.additivity.ETLLogger=false
-log4j.appender.ETLLogger=org.apache.log4j.RollingFileAppender
-log4j.appender.ETLLogger.File=./logs/ETLLogger.log
-log4j.appender.ETLLogger.Append=true
-log4j.appender.ETLLogger.MaxFileSize=16MB
-log4j.appender.ETLLogger.MaxBackupIndex=3
-log4j.appender.ETLLogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.ETLLogger.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p - %m%n
-log4j.appender.ETLLogger.Encoding=UTF-8
-
-# root logger
-log4j.rootLogger=ERROR, infoRollingFile, console
-
-# console output
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.out
-log4j.appender.console.Threshold=INFO
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.console.Encoding=UTF-8
-
-# INFO-level rolling file output
-log4j.appender.infoRollingFile=org.apache.log4j.RollingFileAppender
-log4j.appender.infoRollingFile.File=./logs/info.log
-log4j.appender.infoRollingFile.Threshold=INFO
-log4j.appender.infoRollingFile.Append=true
-log4j.appender.infoRollingFile.MaxFileSize=16MB
-log4j.appender.infoRollingFile.MaxBackupIndex=3
-log4j.appender.infoRollingFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.infoRollingFile.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss} %-5p [%c] - %m%n
-log4j.appender.infoRollingFile.Encoding=UTF-8
-
-shell.log.level=WARN
-log4j.logger.org.spark-project.jetty=WARN
-log4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-log4j.logger.org.apache.parquet=ERROR
-log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
-log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
-log4j.logger.org.apache.spark.repl.Main=${shell.log.level}
-log4j.logger.org.apache.spark.api.python.PythonGatewayServer=${shell.log.level}
-log4j.logger.org.apache.spark.ContextCleaner=ERROR
-log4j.logger.org.apache.hadoop.mapreduce=ERROR
-log4j.logger.org.apache.hadoop.hive=ERROR
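
Per this config, the named logger "ETLLogger" writes to its own rolling file plus the shared appenders, and additivity=false keeps it out of the root logger. A minimal usage sketch (log4j 1.x):

```scala
import org.apache.log4j.Logger

val etlLogger: Logger = Logger.getLogger("ETLLogger")
etlLogger.info("step finished") // goes to ./logs/ETLLogger.log, info.log and the console
```
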
diff --git a/spark/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker b/spark/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker
deleted file mode 100644
index ca6ee9c..0000000
--- a/spark/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker
+++ /dev/null
@@ -1 +0,0 @@
-mock-maker-inline
\ No newline at end of file
diff --git a/spark/src/test/resources/quality-check.yaml b/spark/src/test/resources/quality-check.yaml
deleted file mode 100644
index 6ea23b6..0000000
--- a/spark/src/test/resources/quality-check.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-- dataCheckType: power null check(error)
- rule: powerNullCheck(`$column`)
- errorType: error
-- dataCheckType: power null check(warn)
- rule: powerNullCheck(`$column`)
- errorType: warn
-- dataCheckType: empty check(warn)
- rule: $column = ''
- errorType: warn
-- dataCheckType: null check
- rule: $column IS NULL
- errorType: error
-- dataCheckType: duplicated check
- rule: UDR.com.github.sharpdata.sharpetl.spark.quality.udr.DuplicatedCheck
- errorType: error
\ No newline at end of file
diff --git a/spark/src/test/resources/tasks/daily_jobs_summary_report_test.sql b/spark/src/test/resources/tasks/daily_jobs_summary_report_test.sql
deleted file mode 100644
index 66fff99..0000000
--- a/spark/src/test/resources/tasks/daily_jobs_summary_report_test.sql
+++ /dev/null
@@ -1,12 +0,0 @@
--- workflow=daily_jobs_summary_report_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.DailyJobsSummaryReportTransformer
--- methodName=transform
--- transformerType=object
--- datasource=hive,postgres
--- target=do_nothing
diff --git a/spark/src/test/resources/tasks/from_step_id.sql b/spark/src/test/resources/tasks/from_step_id.sql
deleted file mode 100644
index c4aff39..0000000
--- a/spark/src/test/resources/tasks/from_step_id.sql
+++ /dev/null
@@ -1,19 +0,0 @@
--- workflow=from_step_id
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=transformation
--- className=some.thing.does.not.exists
--- methodName=transform
--- transformerType=dynamic_object
--- target=temp
--- tableName=`dynamic_tmp_transformer_result_table`
-
-
--- step=2
--- source=temp
--- target=temp
--- tableName=do_nothing_table
-SELECT 'success';
diff --git a/spark/src/test/resources/tasks/http_datasource.sql b/spark/src/test/resources/tasks/http_datasource.sql
deleted file mode 100644
index c54bba3..0000000
--- a/spark/src/test/resources/tasks/http_datasource.sql
+++ /dev/null
@@ -1,57 +0,0 @@
--- workflow=http_transformation_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=temp
--- target=variables
-select from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy') as `YEAR`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'MM') as `MONTH`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'dd') as `DAY`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR`,
- date_format(to_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), "YYYY-MM-dd'T'HH:mm:ssXXX") as `START_TIME_TIMESTAMP`,
- date_format(to_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), "YYYY-MM-dd'T'HH:mm:ssXXX") as `END_TIME_TIMESTAMP`;
-
-
--- step=2
--- source=http
--- url=http://localhost:1080/get_workday?start=${START_TIME_TIMESTAMP}&end=${END_TIME_TIMESTAMP}
--- target=temp
--- tableName=`source_data`
-
-
--- step=3
--- source=temp
--- tableName='source_data'
--- target=temp
--- tableName=`source_data_workday`
--- writeMode=append
-with `workday_temp` as (select explode(from_json(value,
-    'struct<Report_Entry:array<struct<a:string,b:string,c:string,d:string,e:string,f:string,g:string,h:string,i:string,j:string,k:string,l:string,m:string,n:string,o:string,p:string,q:string>>>').Report_Entry)
-    as Report_Entry
- from `source_data`)
-
-
-select Report_Entry.`a` as `a`,
- Report_Entry.`b` as `b`,
- Report_Entry.`c` as `c`,
- Report_Entry.`d` as `d`,
- Report_Entry.`e` as `e`,
- Report_Entry.`f` as `f`,
- Report_Entry.`g` as `g`,
- Report_Entry.`h` as `h`,
- Report_Entry.`i` as `i`,
- Report_Entry.`j` as `j`,
- Report_Entry.`k` as `k`,
- Report_Entry.`l` as `l`,
- Report_Entry.`m` as `m`,
- Report_Entry.`n` as `n`,
- Report_Entry.`o` as `o`,
- Report_Entry.`p` as `p`,
- Report_Entry.`q` as `q`,
- '${YEAR}' as `year`,
- '${MONTH}' as `month`,
- '${DAY}' as `day`,
- '${HOUR}' as `hour`
-from `workday_temp`;
\ No newline at end of file
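
Step 3 relies on Spark's `from_json` with an explicit struct schema (restored above) followed by `explode`. A reduced sketch of the same parsing with three fields instead of seventeen:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{explode, from_json}
import org.apache.spark.sql.types._

val spark = SparkSession.builder().master("local[1]").getOrCreate()
import spark.implicits._

val entry = StructType(Seq("a", "b", "c").map(StructField(_, StringType)))
val schema = StructType(Seq(StructField("Report_Entry", ArrayType(entry))))

Seq("""{"Report_Entry":[{"a":"1","b":"2","c":"3"}]}""").toDF("value")
  .select(explode(from_json($"value", schema)("Report_Entry")).as("Report_Entry"))
  .select($"Report_Entry.a", $"Report_Entry.b", $"Report_Entry.c")
  .show()
```
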
diff --git a/spark/src/test/resources/tasks/http_datasource_to_variables.sql b/spark/src/test/resources/tasks/http_datasource_to_variables.sql
deleted file mode 100644
index 21855f9..0000000
--- a/spark/src/test/resources/tasks/http_datasource_to_variables.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- workflow=http_transformation_to_variables_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
-
--- step=1
--- source=temp
--- target=variables
-select from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy') as `YEAR`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'MM') as `MONTH`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'dd') as `DAY`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR`,
- date_format(to_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), "YYYY-MM-dd'T'HH:mm:ssXXX") as `START_TIME_TIMESTAMP`,
- date_format(to_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), "YYYY-MM-dd'T'HH:mm:ssXXX") as `END_TIME_TIMESTAMP`;
-
-
--- step=2
--- source=http
--- url=http://localhost:1080/get_workday?start=${START_TIME_TIMESTAMP}&end=${END_TIME_TIMESTAMP}
--- fieldName=types
--- jsonPath=$.phoneNumbers[*].type
--- splitBy=__
--- target=variables
-
-
--- step=3
--- source=temp
--- target=temp
--- tableName=`target_data_types`
--- writeMode=append
-select '${types}' as `types`;
\ No newline at end of file
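
The jsonPath/splitBy pair in step 2 extracts every match and joins the values into one variable. A sketch using Jayway JsonPath — the library is an assumption; the diff only shows the configuration keys.

```scala
import com.jayway.jsonpath.JsonPath
import scala.jdk.CollectionConverters._

val body = """{"phoneNumbers":[{"type":"iPhone"},{"type":"home"}]}"""
val types = JsonPath.read[java.util.List[String]](body, "$.phoneNumbers[*].type")
val joined = types.asScala.mkString("__") // "iPhone__home", matching splitBy=__
```
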
diff --git a/spark/src/test/resources/tasks/http_loop_request.sql b/spark/src/test/resources/tasks/http_loop_request.sql
deleted file mode 100644
index 4aef011..0000000
--- a/spark/src/test/resources/tasks/http_loop_request.sql
+++ /dev/null
@@ -1,24 +0,0 @@
--- workflow=default_to_temp_source
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=temp
--- target=temp
--- tableName=temp_table
-select 'test_1' as `table_name`
-union all
-select 'test_2' as `table_name`
-union all
-select 'test_3' as `table_name`
-union all
-select 'test_4' as `table_name`
-
--- step=2
--- source=http
--- url=http://localhost:1080/get_from_table/${table_name}
--- fieldName=result
--- target=temp
--- tableName=target_temp_table
--- loopOver=temp_table
diff --git a/spark/src/test/resources/tasks/int_test/auto_inc_id_mode.sql b/spark/src/test/resources/tasks/int_test/auto_inc_id_mode.sql
deleted file mode 100644
index 43b1355..0000000
--- a/spark/src/test/resources/tasks/int_test/auto_inc_id_mode.sql
+++ /dev/null
@@ -1,33 +0,0 @@
--- workflow=auto_inc_id_mode
--- loadType=incremental
--- logDrivenType=auto_inc_id
-
--- step=1
--- source=mysql
--- dbName=int_test
--- tableName=test_inc_id_table
--- target=variables
-SELECT ${DATA_RANGE_START} AS `lowerBound`,
- MAX(`id`) AS `upperBound`
-FROM `int_test`.`test_inc_id_table`;
-
--- step=2
--- source=mysql
--- dbName=int_test
--- tableName=test_inc_id_table
--- numPartitions=4
--- lowerBound=${lowerBound}
--- upperBound=${upperBound}
--- partitionColumn=id
--- target=mysql
--- dbName=int_test
--- tableName=ods_inc_id_table
--- transaction=false
--- writeMode=upsert
-SELECT `id` AS `id`,
- `value` AS `value`,
- '${JOB_ID}' AS `job_id`,
- now() AS `job_time`
-FROM `int_test`.`test_inc_id_table`
-WHERE `id` > ${lowerBound}
- AND `id` <= ${upperBound};
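
Step 2's numPartitions/lowerBound/upperBound/partitionColumn options map onto Spark's stride-partitioned JDBC read: the id range is split into 4 parallel queries. The equivalent direct reader call looks roughly like this; the bounds are hardcoded for illustration, whereas the workflow computes them in step 1.

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().getOrCreate()
val df = spark.read
  .format("jdbc")
  .option("url", "jdbc:mysql://localhost:2334/int_test") // from application-test.properties
  .option("dbtable", "test_inc_id_table")
  .option("user", "admin")
  .option("password", "***") // decrypted from the ENC(...) value at runtime
  .option("partitionColumn", "id")
  .option("lowerBound", "0")
  .option("upperBound", "100000")
  .option("numPartitions", "4")
  .load()
```
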
diff --git a/spark/src/test/resources/tasks/int_test/compressTar.sql b/spark/src/test/resources/tasks/int_test/compressTar.sql
deleted file mode 100644
index e9a3aea..0000000
--- a/spark/src/test/resources/tasks/int_test/compressTar.sql
+++ /dev/null
@@ -1,25 +0,0 @@
--- workflow=compressTar
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=compresstar
--- encoding=utf-8
--- targetPath=/out/
--- tarPath=/out/((\w*.tar.gz))
--- tmpPath=/out/tmp/
--- bakPath=/out/bak/
--- fileNamePattern=\d{1}.txt
--- target=do_nothing
-
--- step=2
--- source=csv
--- encoding=utf-8
--- inferSchema=true
--- sep=\t
--- header=false
--- fileNamePattern=\w*_1.txt
--- selectExpr=_c0 as num
--- fileDir=/out/
--- target=do_nothing
diff --git a/spark/src/test/resources/tasks/int_test/quality_check.sql b/spark/src/test/resources/tasks/int_test/quality_check.sql
deleted file mode 100644
index 9001fca..0000000
--- a/spark/src/test/resources/tasks/int_test/quality_check.sql
+++ /dev/null
@@ -1,37 +0,0 @@
--- workflow=test_dwd_with_quality_check
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
-
--- step=1
--- source=temp
--- target=variables
-select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
- from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'),
- 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
-
--- step=2
--- source=mysql
--- dbName=int_test
--- tableName=test_ods_for_quality_check
--- options
--- idColumn=order_id
--- column.phone.qualityCheckRules=power null check(error)
--- column.value.qualityCheckRules=empty check(warn)
--- target=temp
--- tableName=`643e9314`
-select `order_id` as `order_id`,
- `phone` as `phone`,
- `value` as `value`,
- `bz_time` as `bz_time`,
- job_id as `job_id`,
- '${EFFECTIVE_START_TIME}' as effective_start_time,
- '9999-01-01 00:00:00' as effective_end_time,
- '1' as is_active,
- '1' as is_latest,
- '${DATA_RANGE_START}' as idempotent_key,
- '${DATE_END}' as dw_insert_date
-from `int_test`.`test_ods_for_quality_check`
-where `dt` = '${DATA_RANGE_START}';
diff --git a/spark/src/test/resources/tasks/int_test/sp_test.sql b/spark/src/test/resources/tasks/int_test/sp_test.sql
deleted file mode 100644
index ce81082..0000000
--- a/spark/src/test/resources/tasks/int_test/sp_test.sql
+++ /dev/null
@@ -1,17 +0,0 @@
--- workflow=sp_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=transformation
--- dbType=mysql
--- dbName=int_test
--- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcResultSetTransformer
--- methodName=transform
--- transformerType=object
--- target=mysql
--- dbName=int_test
--- tableName=sp_test
--- writeMode=append
-call my_test()
\ No newline at end of file
diff --git a/spark/src/test/resources/tasks/int_test/test_auto_create_dim_source_delta.sql b/spark/src/test/resources/tasks/int_test/test_auto_create_dim_source_delta.sql
deleted file mode 100644
index df16064..0000000
--- a/spark/src/test/resources/tasks/int_test/test_auto_create_dim_source_delta.sql
+++ /dev/null
@@ -1,31 +0,0 @@
--- workflow=test_auto_create_dim_source_delta
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=create database
--- target=delta_lake
-CREATE SCHEMA IF NOT EXISTS delta_db;
-
-
--- step=create table
--- target=delta_lake
-create or replace table delta_db.test_fact
-(
- id STRING,
- bz_time TIMESTAMP
-) using delta;
-
-
--- step=write data
--- target=delta_lake
--- dbName=delta_db
--- tableName=test_fact
--- writeMode=overwrite
-select '1' as id,
- '2022-02-02 17:12:59' as bz_time;
-
--- step=print data to console
--- source=delta_lake
--- target=console
-select * from delta_db.test_fact;
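
The same workflow expressed directly against SparkSession.sql, assuming delta-spark is on the classpath and the session catalog is Delta-enabled:

```scala
spark.sql("CREATE SCHEMA IF NOT EXISTS delta_db")
spark.sql(
  """CREATE OR REPLACE TABLE delta_db.test_fact (
    |  id STRING,
    |  bz_time TIMESTAMP
    |) USING delta""".stripMargin)
spark.sql("INSERT OVERWRITE delta_db.test_fact " +
  "SELECT '1' AS id, timestamp'2022-02-02 17:12:59' AS bz_time")
spark.table("delta_db.test_fact").show()
```
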
diff --git a/spark/src/test/resources/tasks/int_test/test_fact_split_source.sql b/spark/src/test/resources/tasks/int_test/test_fact_split_source.sql
deleted file mode 100644
index 6191973..0000000
--- a/spark/src/test/resources/tasks/int_test/test_fact_split_source.sql
+++ /dev/null
@@ -1,22 +0,0 @@
--- workflow=test_fact_split_source
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=mysql
--- dbName=int_test
--- tableName=test_fact_split_source
--- target=mysql
--- dbName=int_test
--- tableName=test_split
--- writeMode=append
-SELECT id,
- user_id,
- user_name,
- user_account,
- bz_time,
- '${JOB_ID}' as `job_id`,
- now() as `job_time`,
- '${DATA_RANGE_START}' AS dt
-FROM int_test.`test_fact_split_source`;
diff --git a/spark/src/test/resources/tasks/int_test/test_source.sql b/spark/src/test/resources/tasks/int_test/test_source.sql
deleted file mode 100644
index b50a1b4..0000000
--- a/spark/src/test/resources/tasks/int_test/test_source.sql
+++ /dev/null
@@ -1,20 +0,0 @@
--- workflow=test_source
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=mysql
--- dbName=int_test
--- tableName=test_source
--- target=mysql
--- dbName=int_test
--- tableName=test_ods
--- writeMode=append
-SELECT `order_id`,
- `value`,
- `bz_time`,
- '${JOB_ID}' as `job_id`,
- '${DATA_RANGE_START}' AS dt,
- now() as `job_time`
-FROM int_test.`test_source`;
diff --git a/spark/src/test/resources/tasks/int_test/test_source_for_quality_check.sql b/spark/src/test/resources/tasks/int_test/test_source_for_quality_check.sql
deleted file mode 100644
index 989ace3..0000000
--- a/spark/src/test/resources/tasks/int_test/test_source_for_quality_check.sql
+++ /dev/null
@@ -1,22 +0,0 @@
--- workflow=test_source_for_quality_check
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=mysql
--- dbName=int_test
--- tableName=test_source_for_quality_check
--- target=mysql
--- dbName=int_test
--- tableName=test_ods_for_quality_check
--- transaction=true
--- writeMode=append
-SELECT `order_id`,
- `phone`,
- `value`,
- `bz_time`,
- '${JOB_ID}' as `job_id`,
- '${DATA_RANGE_START}' AS dt,
- now() as `job_time`
-FROM `test_source_for_quality_check`;
diff --git a/spark/src/test/resources/tasks/jobDependencyCheck.sql b/spark/src/test/resources/tasks/jobDependencyCheck.sql
deleted file mode 100644
index 0ef3faf..0000000
--- a/spark/src/test/resources/tasks/jobDependencyCheck.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- workflow=jobDependencyCheck
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
-
--- step=1
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.transformation.JobDependencyCheckTransformer
--- methodName=transform
--- transformerType=object
--- dependencies=task-a,task-b
--- target=do_nothing
-
diff --git a/spark/src/test/resources/tasks/latest-only.sql b/spark/src/test/resources/tasks/latest-only.sql
deleted file mode 100644
index e68fe59..0000000
--- a/spark/src/test/resources/tasks/latest-only.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- workflow=latest-only
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
-
--- step=1
--- source=temp
--- target=temp
--- tableName=do_nothing_table
-SELECT 'success';
diff --git a/spark/src/test/resources/tasks/refresh-temp.sql b/spark/src/test/resources/tasks/refresh-temp.sql
deleted file mode 100644
index 822e9be..0000000
--- a/spark/src/test/resources/tasks/refresh-temp.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- workflow=refresh-temp
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
-
--- step=1
--- source=temp
--- target=temp
--- tableName=do_nothing_table
-SELECT 'success';
diff --git a/spark/src/test/resources/tasks/replace_template_tablename.sql b/spark/src/test/resources/tasks/replace_template_tablename.sql
deleted file mode 100644
index 2b68c49..0000000
--- a/spark/src/test/resources/tasks/replace_template_tablename.sql
+++ /dev/null
@@ -1,33 +0,0 @@
--- workflow=replace_template_tablename
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=temp
--- target=variables
-select from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy') as `YEAR`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'MM') as `MONTH`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'dd') as `DAY`,
- from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR`,
- 'temp_source' as `sources`,
- 'temp_target' as `target`,
- 'temp_end' as `end`
-
-
--- step=2
--- source=temp
--- tableName=${sources}
--- target=temp
--- tableName=${target}
--- writeMode=overwrite
-select * from ${sources}
-
-
--- step=3
--- source=temp
--- tableName=${target}
--- target=temp
--- tableName=${end}
--- writeMode=overwrite
-select * from ${target}
diff --git a/spark/src/test/resources/tasks/session_isolation.sql b/spark/src/test/resources/tasks/session_isolation.sql
deleted file mode 100644
index 9426555..0000000
--- a/spark/src/test/resources/tasks/session_isolation.sql
+++ /dev/null
@@ -1,27 +0,0 @@
--- workflow=session_isolation
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=temp
--- target=temp
--- tableName=do_nothing_table
-SELECT 'step1';
-
--- step=2
--- source=temp
--- target=temp
--- tableName=do_nothing_table
--- conf
--- spark.sql.shuffle.partitions=1
-SELECT 'step2';
-
--- step=3
--- source=temp
--- target=temp
--- tableName=do_nothing_table
--- conf
--- spark.sql.shuffle.partitions=5
-SET spark.sql.hive.version=0.12.1;
-
diff --git a/spark/src/test/resources/tasks/sftp_test.sql b/spark/src/test/resources/tasks/sftp_test.sql
deleted file mode 100644
index 0bfd13f..0000000
--- a/spark/src/test/resources/tasks/sftp_test.sql
+++ /dev/null
@@ -1,23 +0,0 @@
--- workflow=sftp_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=sftp
--- configPrefix=sftp-ticketflap
--- path=/Users/yangwliu/sftp
--- destinationDir=/Users/yangwliu/sftp-copy
--- fileNamePattern=edw_sales_thodw
--- sourceDir=/users/yangwliu/sftp
--- readAll=false
--- target=variables
-
--- step=2
--- source=csv
--- filePath=/Users/yangwliu/sftp-copy/edw_sales_thodw_20211107.txt
--- sep=|
--- fileNamePattern=edw_sales_thodw
--- sourceDir=/users/yangwliu/sftp
--- target=variables
-
diff --git a/spark/src/test/resources/tasks/source_to_target.sql b/spark/src/test/resources/tasks/source_to_target.sql
deleted file mode 100644
index 345a433..0000000
--- a/spark/src/test/resources/tasks/source_to_target.sql
+++ /dev/null
@@ -1,19 +0,0 @@
--- workflow=source_to_target
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=h2
--- dbName=int_test
--- tableName=source
--- target=h2
--- dbName=int_test
--- tableName=target
--- writeMode=append
-SELECT id,
- value,
- '${JOB_ID}' as `job_id`,
- now() as `job_time`,
- bz_time
-FROM source;
diff --git a/spark/src/test/resources/tasks/transformer/TestDynamicLoadingTransformer.scala b/spark/src/test/resources/tasks/transformer/TestDynamicLoadingTransformer.scala
deleted file mode 100644
index 613275f..0000000
--- a/spark/src/test/resources/tasks/transformer/TestDynamicLoadingTransformer.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-import com.github.sharpdata.sharpetl.spark.transformation.Transformer
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql.DataFrame
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession.sparkSession
-
-object TestDynamicLoadingTransformer extends Transformer {
- override def transform(args: Map[String, String]): DataFrame = {
- val spark = ETLSparkSession.sparkSession
- val schema = List(
- org.apache.spark.sql.types.StructField("id", org.apache.spark.sql.types.IntegerType, true),
- org.apache.spark.sql.types.StructField("name", org.apache.spark.sql.types.StringType, true)
- )
-
- val data = Seq(
- org.apache.spark.sql.Row(1, args("jobId"))
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- org.apache.spark.sql.types.StructType(schema)
- )
-
- testDf.createOrReplaceTempView("test_tmp_view")
-
- (
- spark.sql("select id, name, 'aa' as address from test_tmp_view").union(
- sparkSession.sql("select 'c' as id, 'd' as name, 'cc' as address")
- )
- ).drop("address")
- }
-}
diff --git a/spark/src/test/resources/tasks/transformer/dynamic_transformer.sql b/spark/src/test/resources/tasks/transformer/dynamic_transformer.sql
deleted file mode 100644
index 6013491..0000000
--- a/spark/src/test/resources/tasks/transformer/dynamic_transformer.sql
+++ /dev/null
@@ -1,12 +0,0 @@
--- workflow=dynamic_transformer
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=transformation
--- className=com.github.sharpdata.sharpetl.spark.end2end.TestDynamicLoadingTransformer
--- methodName=transform
--- transformerType=dynamic_object
--- target=temp
--- tableName=`dynamic_tmp_transformer_result_table`
\ No newline at end of file
diff --git a/spark/src/test/resources/tasks/udf_test.sql b/spark/src/test/resources/tasks/udf_test.sql
deleted file mode 100644
index 73da074..0000000
--- a/spark/src/test/resources/tasks/udf_test.sql
+++ /dev/null
@@ -1,17 +0,0 @@
--- workflow=udf_test
--- period=1440
--- loadType=incremental
--- logDrivenType=timewindow
-
--- step=1
--- source=class
--- className=com.github.sharpdata.sharpetl.spark.end2end.TestUdfObj
--- target=udf
--- methodName=testUdf
--- udfName=test_udf
-
--- step=2
--- source=temp
--- target=temp
--- tableName=udf_result
-select test_udf('input') as `result`;
\ No newline at end of file
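
Step 1 registers `TestUdfObj.testUdf` as the SQL function `test_udf`. The object itself is not part of this hunk; a hypothetical shape consistent with the object-UDF pattern used elsewhere in the tests would be:

```scala
// Hypothetical sketch only -- the real TestUdfObj is defined in end2end tests.
object TestUdfObj extends Serializable {
  def testUdf(input: String): String = s"udf($input)"
}
```
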
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/api/AnnotationSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/api/AnnotationSpec.scala
deleted file mode 100644
index c579199..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/api/AnnotationSpec.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.api
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.Source
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.annotation.AnnotationScanner
-import com.github.sharpdata.sharpetl.spark.job.SparkSessionTestWrapper
-import org.apache.spark.sql.SparkSession
-import org.scalatest.BeforeAndAfterEach
-import org.scalatest.funspec.AnyFunSpec
-
-class AnnotationSpec extends AnyFunSpec with BeforeAndAfterEach with SparkSessionTestWrapper {
-
- it("find object by annotations") {
- val value: Class[Source[_, _]] = AnnotationScanner.sourceRegister("do_nothing")
- assert(value != null)
-
- value.getMethod("read", classOf[WorkflowStep], classOf[JobLog], classOf[SparkSession], classOf[Variables])
- .invoke(value.newInstance(), null, null, spark, null)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/common/PatternTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/common/PatternTest.scala
deleted file mode 100644
index 50c2407..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/common/PatternTest.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.common
-
-import com.github.sharpdata.sharpetl.core.util.Constants.Pattern
-import org.scalatest.funspec.AnyFunSpec
-import org.scalatest.matchers.must.Matchers.be
-import org.scalatest.matchers.should.Matchers.convertToAnyShouldWrapper
-
-class PatternTest extends AnyFunSpec {
- private val UNKNOWN = -1
- private val NUM_PARTITIONS = 1
- private val REPARTITION_COLUMNS = 2
- private val NUM_PARTITIONS_AND_REPARTITION_COLUMNS = 3
-
- describe("Repartition pattern") {
- it("should return the correct match result") {
- matchRepartitionArgsType("0") should be(UNKNOWN)
- matchRepartitionArgsType("1,") should be(UNKNOWN)
- matchRepartitionArgsType("1") should be(NUM_PARTITIONS)
- matchRepartitionArgsType("10") should be(NUM_PARTITIONS)
-
- matchRepartitionArgsType("1a") should be(UNKNOWN)
- matchRepartitionArgsType("_a") should be(REPARTITION_COLUMNS)
- matchRepartitionArgsType("a") should be(REPARTITION_COLUMNS)
- matchRepartitionArgsType("a1_") should be(REPARTITION_COLUMNS)
- matchRepartitionArgsType("a1_,B_2") should be(REPARTITION_COLUMNS)
-
- matchRepartitionArgsType("0,a1_,B_2") should be(UNKNOWN)
- matchRepartitionArgsType("10,a1_,B_2") should be(NUM_PARTITIONS_AND_REPARTITION_COLUMNS)
- }
- }
-
- def matchRepartitionArgsType(repartitionArgs: String): Int = {
- repartitionArgs match {
- case Pattern.REPARTITION_NUM_PATTERN() =>
- NUM_PARTITIONS
- case Pattern.REPARTITION_COLUMNS_PATTERN(_) =>
- REPARTITION_COLUMNS
- case Pattern.REPARTITION_NUM_COLUMNS_PATTERN(_) =>
- NUM_PARTITIONS_AND_REPARTITION_COLUMNS
- case _ =>
- UNKNOWN
- }
- }
-}
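
The Pattern constants under test live in core and are not part of this diff. Hypothetical reconstructions that satisfy every assertion above (note the extractor arity: the NUM pattern has no capture group, the other two have one):

```scala
val REPARTITION_NUM_PATTERN = "^[1-9]\\d*$".r
val REPARTITION_COLUMNS_PATTERN = "^([A-Za-z_][A-Za-z0-9_]*(?:,[A-Za-z_][A-Za-z0-9_]*)*)$".r
val REPARTITION_NUM_COLUMNS_PATTERN = "^[1-9]\\d*,((?:[A-Za-z_][A-Za-z0-9_]*)(?:,[A-Za-z_][A-Za-z0-9_]*)*)$".r
```
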
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/CompressTarDataSourceTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/CompressTarDataSourceTest.scala
deleted file mode 100644
index bef1fe2..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/CompressTarDataSourceTest.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.spark.job.SparkSessionTestWrapper
-import com.github.sharpdata.sharpetl.core.datasource.config.{CSVDataSourceConfig, CompressTarConfig}
-import com.github.sharpdata.sharpetl.core.util.{HDFSUtil, IOUtil, WorkflowReader}
-import org.apache.spark.SparkConf
-import org.scalatest.funspec.AnyFunSpec
-
-import java.io.File
-import java.nio.file.Files
-
-
-class CompressTarDataSourceTest extends AnyFunSpec with SparkSessionTestWrapper {
- it("should load data from tar and can be read by follow step ") {
- val line = 100
-
- val files = IOUtil.listFiles("data.tar.gz") ++ IOUtil.listFilesJar("data.tar.gz")
- val rootPath = files.filter(it => it.indexOf("data.tar.gz") != -1).head.replace("/data.tar.gz", "")
- val steps = WorkflowReader.readWorkflow("compressTar").steps
- val config = steps.head.getSourceConfig[CompressTarConfig]
- val config2 = steps.last.getSourceConfig[CSVDataSourceConfig]
-
-
- val targetPath = s"${rootPath}${config.targetPath}"
- config.setTarPath(s"${rootPath}${config.tarPath}")
- config.setTargetPath(targetPath)
- config.setTmpPath(s"${rootPath}${config.tmpPath}")
- config.setBakPath(s"${rootPath}${config.bakPath}")
-
- //mock data
- val tesTemplatePath = rootPath
- val dir = new File(targetPath)
- val tmp = new File(config.getTmpPath)
- val bak = new File(config.getBakPath)
- val data1 = new File(s"${targetPath}/data.tar.gz")
- val data2 = new File(s"${targetPath}/data2.tar.gz")
-
- if (!dir.exists()) dir.mkdirs()
- if (!tmp.exists()) tmp.mkdirs()
- if (!bak.exists()) bak.mkdirs()
- if (data1.exists()) data1.delete()
- if (data2.exists()) data2.delete()
-
- Files.copy(new File(s"${tesTemplatePath}/data.tar.gz").toPath, data1.toPath)
- Files.copy(new File(s"${tesTemplatePath}/data2.tar.gz").toPath, data2.toPath)
-
-
- new CompressTarDataSource().loadFromCompressTar(spark, config)
-
- config2.setFileDir(s"${rootPath}${config2.fileDir}")
- val file = HDFSUtil.listFileUrl(
- config2.getFileDir,
- config2.getFileNamePattern
- )
- config2.setFilePath(file.head)
-
- val df2 = new CSVDataSource().loadFromHdfs(spark, config2)
- df2.take(line).foreach(println)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HdfsDataSourceTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HdfsDataSourceTest.scala
deleted file mode 100644
index a84305d..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HdfsDataSourceTest.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.util.HDFSUtil
-import org.scalatest.funspec.AnyFunSpec
-
-class HdfsDataSourceTest extends AnyFunSpec {
-
- ignore("Hdfs Data Source") {
- it("should list all task files") {
- val taskPath = "hdfs:/user/hive/data-pipeline/tasks/"
- val tasks = HDFSUtil.recursiveListFiles(taskPath)
- assert(tasks.nonEmpty)
- }
- }
-
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HiveDataSourceTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HiveDataSourceTest.scala
deleted file mode 100644
index 457811d..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HiveDataSourceTest.scala
+++ /dev/null
@@ -1,123 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.spark.job.SparkSessionTestWrapper
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.Constants.WriteMode.MERGE_WRITE
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
-import org.mockito.{ArgumentMatchersSugar, MockitoSugar}
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-
-class HiveDataSourceTest extends AnyFlatSpec with MockitoSugar with ArgumentMatchersSugar with SparkSessionTestWrapper with should.Matchers {
-
- it should "extract partition selection" in {
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("name", StringType, true),
- StructField("year", StringType, true),
- StructField("month", StringType, true),
- StructField("day", StringType, true)
- )
-
- val data = Seq(
- Row(1, "111", "2021", "12", "01"),
- Row(2, "222", "2021", "12", "01"),
- Row(1, null, "2021", "12", "02"),
- Row(2, "222", "2021", "11", "01")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val sourceTableName = "test_hive_union_sql"
- testDf.createOrReplaceTempView(sourceTableName)
-
- val step = new WorkflowStep
-
- step.sql =
- """
- |select `hour` as `hour`,
- | `minute` as `minute`,
- | `year` as `year`,
- | `month` as `month`,
- | `day` as `day`
- |from `${{developer}}`.`${{pre_ods_card_result}}`
- |where `year`= '2022' and `month`= '12' and `day`= '12'
- | and `hour`= '10' and `minute`= '10';
- |""".stripMargin
-
- val sql: String = new HiveDataSource().selfUnionClause(sourceTableName, "hive_table", Array("year", "month", "day"), MERGE_WRITE, step)
- sql should be(
- """union all
- |select * from hive_table where ((year = 2021 and month = 12 and day = 01) or (year = 2021 and month = 12 and day = 02) or (year = 2021 and month = 11 and day = 01)) and !(`year`= '2022' and `month`= '12' and `day`= '12'
- | and `hour`= '10' and `minute`= '10')""".stripMargin)
- }
-
- it should "extract partition selection for complex sql" in {
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("name", StringType, true),
- StructField("year", StringType, true),
- StructField("month", StringType, true),
- StructField("day", StringType, true),
- StructField("hour", StringType, true),
- StructField("minute", StringType, true)
- )
-
- val data = Seq(
- Row(1, "111", "2021", "12", "01", "10", "10"),
- Row(2, "222", "2021", "12", "01", "10", "10"),
- Row(1, null, "2021", "12", "02", "10", "10"),
- Row(2, "222", "2021", "11", "01", "10", "10")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val sourceTableName = "test_mergewrite"
- testDf.createOrReplaceTempView(sourceTableName)
-
- val step = new WorkflowStep
-
- step.sql =
- s"""
- |with window_result as (
- | SELECT t.*,
- | row_number() over(partition by t.`id`, t.`name` order by t.`id` desc) as `insert_id`
- | FROM $sourceTableName t
- | where `year`= '2022' and `month`= '12' and `day`= '12'
- | and `hour`= '10' and `minute`= '10'),
- | distinct_result as (
- | SELECT `id` as `id`,
- | `name` as `name`,
- | `hour` as `hour`,
- | `minute` as `minute`,
- | `year` as `year`,
- | `month` as `month`,
- | `day` as `day`
- | FROM window_result WHERE `insert_id` = 1)
- |select `id` as `id`,
- | `name` as `name`,
- | `hour` as `hour`,
- | `minute` as `minute`,
- | `year` as `year`,
- | `month` as `month`,
- | `day` as `day`
- |from distinct_result
- |where `year`= '2022' and `month`= '12' and `day`= '12'
- | and `hour`= '10' and `minute`= '10'
- |""".stripMargin
-
- val sql: String = new HiveDataSource().selfUnionClause(sourceTableName, "hive_table", Array("year", "month", "day"), MERGE_WRITE, step)
- sql should be(
- """union all
- |select * from hive_table where ((year = 2021 and month = 12 and day = 01) or (year = 2021 and month = 12 and day = 02) or (year = 2021 and month = 11 and day = 01)) and !(`year`= '2022' and `month`= '12' and `day`= '12'
- | and `hour`= '10' and `minute`= '10')""".stripMargin)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpDataSourceSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpDataSourceSpec.scala
deleted file mode 100644
index 46cb94e..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/HttpDataSourceSpec.scala
+++ /dev/null
@@ -1,194 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import org.mockserver.client.MockServerClient
-import org.mockserver.model.HttpRequest
-import org.mockserver.model.HttpRequest.request
-import org.mockserver.model.HttpResponse.response
-import org.scalatest.{BeforeAndAfterAll, DoNotDiscover}
-import org.testcontainers.containers.MockServerContainer
-import org.testcontainers.utility.DockerImageName
-
-import scala.jdk.CollectionConverters._
-
-
-@DoNotDiscover
-class HttpDataSourceSpec extends ETLSuit with BeforeAndAfterAll {
-
- private val mockHttpServer = new MockServerContainer(DockerImageName.parse("jamesdbloom/mockserver:mockserver-5.11.2"))
-
- override protected def beforeAll(): Unit = {
- mockHttpServer.setPortBindings(List("1080:1080").asJava)
- mockHttpServer.start()
- super.beforeAll()
- }
-
- override protected def afterAll(): Unit = {
- mockHttpServer.stop()
- super.afterAll()
- }
-
- describe("HttpDataSource") {
- val requestDef: HttpRequest = request().withPath("/get_workday")
- val testJsonStr: String =
- """{
- | "Report_Entry": [
- | {
- | "a": "a",
- | "b": "b",
- | "c": "c",
- | "d": "d",
- | "e": "e",
- | "f": "f",
- | "g": "g",
- | "h": "h",
- | "i": "i",
- | "j": "j",
- | "k": "k",
- | "l": "l",
- | "m": "m",
- | "n": "n",
- | "o": "o",
- | "p": "p",
- | "q": "q"
- | },
- | {
- | "a": "a",
- | "b": "b",
- | "c": "c",
- | "d": "d",
- | "e": "e",
- | "f": "f",
- | "g": "g",
- | "h": "h",
- | "i": "i",
- | "j": "j",
- | "k": "k",
- | "l": "l",
- | "m": "m",
- | "n": "n",
- | "o": "o",
- | "p": "p",
- | "q": "q"
- | }
- | ]
- |}""".stripMargin
- val firstRow = "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,2021,11,28,15"
-
- it("should save in spark temp table") {
- if (spark.version.startsWith("2.3")) {
- ETLLogger.error("`struct` complex type does NOT support Spark 2.3.x")
- } else {
- new MockServerClient(mockHttpServer.getHost, mockHttpServer.getServerPort)
- .clear(requestDef)
- .when(requestDef)
- .respond(response().withBody(testJsonStr))
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=http_datasource", "--period=1440",
- "--local", s"--default-start-time=2021-11-28 15:30:30", "--env=test", "--once")
-
- runJob(jobParameters)
- val df_source_data = spark.sql("select * from `source_data`")
- val string_source_data = df_source_data.collect()(0)(0).toString
- val df_source_workday = spark.sql("select * from `source_data_workday`")
- val string_source_workday = df_source_workday.head().mkString(",")
-
- assert(string_source_data == testJsonStr)
- assert(string_source_workday == firstRow)
- }
- }
-
- it("should save variables in spark temp table") {
- new MockServerClient(mockHttpServer.getHost, mockHttpServer.getServerPort)
- .clear(requestDef)
- .when(requestDef)
- .respond(response().withBody(
- """
- |{
- | "firstName": "John",
- | "lastName" : "doe",
- | "age" : 26,
- | "address" : {
- | "streetAddress": "naist street",
- | "city" : "Nara",
- | "postalCode" : "630-0192"
- | },
- | "phoneNumbers": [
- | {
- | "type" : "iPhone",
- | "number": "0123-4567-8888"
- | },
- | {
- | "type" : "home",
- | "number": "0123-4567-8910"
- | }
- | ]
- |}
- |""".stripMargin))
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=http_datasource_to_variables", "--period=1440",
- "--local", s"--default-start-time=2021-11-28 15:30:30", "--env=test", "--once")
-
- runJob(jobParameters)
- val df_types = spark.sql("select `types` from `target_data_types`")
- val string_types = df_types.head().mkString(",")
-
- assert(string_types == "iPhone__home")
- }
- }
-
- describe("loop over HttpDataSource") {
- def requestDef(tableName: String): HttpRequest = request().withPath(s"/get_from_table/$tableName")
-
- def testJsonStr(tableName: String): String =
- s"""{
- | "result": "result_of_$tableName"
- |}""".stripMargin
-
- it("should send loop request") {
- if (spark.version.startsWith("2.3")) {
- ETLLogger.error("`struct` complex type does NOT support Spark 2.3.x")
- } else {
- val mockServerClient = new MockServerClient(mockHttpServer.getHost, mockHttpServer.getServerPort)
- (1 to 4).foreach(it => {
-
- val req = requestDef(s"test_$it")
- mockServerClient
- .clear(req)
- .when(req)
- .respond(response().withBody(testJsonStr(s"test_$it")))
-
- })
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=http_loop_request", "--period=1440",
- "--local", s"--default-start-time=2021-11-28 15:30:30", "--env=test", "--once")
-
- runJob(jobParameters)
- spark
- .sql("select * from `target_temp_table`")
- .select("result")
- .toLocalIterator().asScala.toList.mkString("\n") should be(
- """[{
- | "result": "result_of_test_1"
- |}]
- |[{
- | "result": "result_of_test_2"
- |}]
- |[{
- | "result": "result_of_test_3"
- |}]
- |[{
- | "result": "result_of_test_4"
- |}]""".stripMargin
- )
- }
- }
- }
-
- override val createTableSql: String = ""
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/UDFConfigExtensionTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/UDFConfigExtensionTest.scala
deleted file mode 100644
index 646a2ed..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/UDFConfigExtensionTest.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource
-
-import com.github.sharpdata.sharpetl.spark.extension.UDFExtension
-import com.github.sharpdata.sharpetl.spark.job.SparkSessionTestWrapper
-import org.mockito.{ArgumentMatchersSugar, MockitoSugar}
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-class UDFClass extends Serializable {
- def map_filter(map: Map[String, String], num: Int): Map[String, Int] = {
- map
- .map(t => t._1 -> t._2.toInt)
- .filter(_._2 > num)
- }
-}
-
-object UDFObj extends Serializable {
- def doNothing(): Int = {
- 1
- }
-}
-
-class UDFConfigExtensionTest extends AnyFlatSpec with MockitoSugar with ArgumentMatchersSugar with SparkSessionTestWrapper with should.Matchers {
-
-
- it should "works with udf" in {
- try {
- UDFExtension.registerUDF(
- spark,
- "class",
- "map_filter",
- "com.github.sharpdata.sharpetl.spark.datasource.UDFClass",
- "map_filter"
- )
- UDFExtension.registerUDF(
- spark,
- "object",
- "do_nothing",
- "com.github.sharpdata.sharpetl.spark.datasource.UDFObj",
- "doNothing"
- )
- val sql =
- """
- |select map_filter(str_to_map('a:1,b:1,c:0', ',', ':'), 0),
- | do_nothing()
- |""".stripMargin
- spark
- .sql(sql)
- .show()
- }
-
- }
-}
\ No newline at end of file
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/AbstractJdbcDataSourceTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/AbstractJdbcDataSourceTest.scala
deleted file mode 100644
index 1b23d30..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/AbstractJdbcDataSourceTest.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import org.scalatest.funspec.AnyFunSpec
-import org.mockito.MockitoSugar
-import org.apache.spark.sql.{DataFrame, SparkSession}
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.POSTGRES
-import com.github.sharpdata.sharpetl.core.util.Constants.WriteMode.APPEND
-import org.mockito.stubbing.ReturnsDeepStubs
-
-class AbstractJdbcDataSourceTest extends AnyFunSpec with MockitoSugar {
-
- val sourceConfig = new DBDataSourceConfig
- val sourceOptions = Map("queryTimeout" -> "5")
- sourceConfig.setDbName("psi")
- sourceConfig.setOptions(sourceOptions)
-
- val targetConfig = new DBDataSourceConfig
- targetConfig.setDbName("psi")
- targetConfig.setTableName("t_company")
- targetConfig.setOptions(sourceOptions)
-
- val sql = "select * from t_company limit 10"
- val step = new WorkflowStep
- step.setStep("1")
- step.setSourceConfig(sourceConfig)
- step.setTargetConfig(targetConfig)
- step.setSql(sql)
- step.setWriteMode(APPEND)
-
- describe("AbstractJdbcDataSource") {
- it("should call spark load with source options") {
- val dataSource = mock[AbstractJdbcDataSource]
- val spark = mock[SparkSession]
-
- val variables = Variables(collection.mutable.Map.empty[String, String])
- val expectOptions = Map("dbtable" -> s"($sql)").++(sourceOptions)
-
- when(dataSource.load(spark, step, variables)).thenCallRealMethod()
- when(dataSource.buildSelectSql(sql)).thenCallRealMethod()
-
- dataSource.load(spark, step, variables)
- verify(dataSource, times(1)).load(spark, expectOptions, "psi")
- }
-
- it("should call df save with target options") {
- val dataSource = spy(new MockPostgresDataSource)
- val df = mock[DataFrame](ReturnsDeepStubs)
-
- when(dataSource.getCols("psi", "t_company")).thenCallRealMethod()
-
- dataSource.save(df, step)
-
- val expectOptions = Map("dbtable" -> " ") ++ sourceOptions ++ Map("numPartitions" -> "8", "batchsize" -> "1024")
- verify(dataSource).buildJdbcConfig("psi", POSTGRES, expectOptions)
- }
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/MockPostgresDataSource.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/MockPostgresDataSource.scala
deleted file mode 100644
index ef6a51b..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/datasource/jdbc/MockPostgresDataSource.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.datasource.jdbc
-
-import org.apache.spark.sql.types.StructField
-import org.apache.spark.sql.types.StringType
-
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType.POSTGRES
-
-
-class MockPostgresDataSource extends AbstractJdbcDataSource(POSTGRES) {
- override def getCols(
- targetDBName: String,
- targetTableName: String
- ): (Seq[StructField], Seq[StructField]) = (
- Seq(StructField("id", StringType)),
- Seq(StructField("code", StringType))
- )
-
- override def makeUpsertCols(
- primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]
- ): Seq[StructField] = {
- primaryCols ++ notPrimaryCols
- }
-
- override def makeUpsertSql(
- tableName: String,
- primaryCols: Seq[StructField],
- notPrimaryCols: Seq[StructField]
- ): String = ""
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/BatchJobSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/BatchJobSpec.scala
deleted file mode 100644
index 6c6ab65..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/BatchJobSpec.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import ETLSuit.runJob
-import com.github.sharpdata.sharpetl.core.util.DateUtil
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.MysqlSuit
-import org.apache.spark.sql.types._
-import org.apache.spark.sql.{DataFrame, Row}
-import org.scalatest.DoNotDiscover
-
-import java.sql.Timestamp
-import java.time.LocalDateTime
-
-@DoNotDiscover
-class BatchJobSpec extends MysqlSuit {
- override val createTableSql: String = ""
- override val targetDbName = "int_test"
- override val sourceDbName: String = "int_test"
-
- val data = Seq(
- Row("jiale", Timestamp.valueOf("2021-01-01 08:00:00")),
- Row("super jiale", Timestamp.valueOf("2021-01-01 08:00:00"))
- )
-
- val schema = List(
- StructField("name", StringType, true),
- StructField("update_time", TimestampType, true)
- )
-
- val sampleDataDf: DataFrame = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val expected: DataFrame = spark.createDataFrame(
- spark.sparkContext.parallelize(
- Seq(
- Row("jiale", Timestamp.valueOf("2021-01-01 08:00:00"), 199),
- Row("super jiale", Timestamp.valueOf("2021-01-01 08:00:00"), 199)
- )
- ),
- StructType(List(
- StructField("new_name", StringType, true),
- StructField("update_time", TimestampType, true),
- StructField("test_expression", IntegerType, true)
- ))
- )
-
- it("should only the latest job only") {
- val now = LocalDateTime.now()
- val startTime = now.minusDays(4L).format(DateUtil.L_YYYY_MM_DD_HH_MM_SS)
- val dataRangeStart = now.minusDays(1L).format(DateUtil.INT_YYYY_MM_DD_HH_MM_SS)
-
- val jobParameters: Array[String] = Array("batch-job",
- "--names=latest-only", "--period=1440",
- "--local", s"--default-start-time=${startTime}", "--env=test", "--latest-only")
-
- runJob(jobParameters)
-
- val df = readFromLog("job_log").where("workflow_name = 'latest-only'")
- df.count() should be(1)
- df.select("data_range_start").head().get(0) should be(dataRangeStart)
- }
-
- it("should refresh time-based job") {
- val startTime = LocalDateTime.now().minusDays(4L).format(DateUtil.L_YYYY_MM_DD_HH_MM_SS)
- val refreshStart = LocalDateTime.now().minusDays(2L).format(DateUtil.INT_YYYY_MM_DD_HH_MM_SS)
- val refreshEnd = LocalDateTime.now().minusDays(1L).format(DateUtil.INT_YYYY_MM_DD_HH_MM_SS)
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=refresh-temp", "--period=1440",
- "--local", s"--default-start-time=${startTime}", "--env=test", "--once")
-
- runJob(jobParameters)
-
- readFromLog("job_log").where("workflow_name = 'refresh-temp'").count() should be(1)
-
- val newJobParameters: Array[String] = Array("single-job",
- "--name=refresh-temp", "--period=1440", "--refresh", s"--refresh-range-start=${refreshStart}", s"--refresh-range-end=${refreshEnd}",
- "--local", "--env=test")
-
- runJob(newJobParameters)
-
- readFromLog("job_log").where("workflow_name = 'refresh-temp'").count() should be(2)
- }
-
- it("should skip if already done") {
- runJob(Array("batch-job",
- s"--names=refresh-temp", "--period=1440",
- "--local",
- "--env=test", "--parallelism=1"))
-
- readFromLog("job_log").where("workflow_name = 'refresh-temp'").count() should be(3)
-
- runJob(Array("batch-job",
- s"--names=refresh-temp", "--period=1440",
- "--local",
- "--env=test", "--parallelism=1"))
-
- readFromLog("job_log").where("workflow_name = 'refresh-temp'").count() should be(3)
- }
-
- it("should from user defined step id") {
- runJob(Array("single-job",
- s"--name=from_step_id", "--period=1440",
- "--local",
- "--env=test", "--once", "--from-step=2"))
- }
-
- it("should exclude from user defined step id") {
- runJob(Array("single-job",
- s"--name=from_step_id", "--period=1440",
- "--local",
- "--env=test", "--once", "--exclude-steps=1"))
- }
-
- it("should run hello world successfully") {
- runJob(Array("single-job",
- s"--name=hello_world", "--period=1440",
- "--local",
- "--env=test", "--once"))
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/DataQualityCheckSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/DataQualityCheckSpec.scala
deleted file mode 100644
index 2aa0ebd..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/DataQualityCheckSpec.scala
+++ /dev/null
@@ -1,124 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import ETLSuit.runJob
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.MysqlSuit
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types._
-import org.scalatest.DoNotDiscover
-import org.scalatest.matchers.should
-
-/**
- * 1. source => ods
- * 2. ods => dwd
- * 3. error data should not appear in the result
- * 4. rows without errors (warn-only rows included) should appear in the result
- */
-@DoNotDiscover
-class DataQualityCheckSpec extends MysqlSuit with should.Matchers {
-
- override val createTableSql: String =
- "CREATE TABLE IF NOT EXISTS test_ods_for_quality_check" +
- " (order_id int, phone varchar(255), value varchar(255), bz_time DATETIME, dt varchar(255), job_id varchar(255), job_time varchar(255));"
-
- override val sourceDbName: String = "int_test"
- val sourceTableName: String = "test_source_for_quality_check"
- override val targetDbName: String = "int_test"
-
- val firstDay = "2021-10-01 00:00:00"
-
- val source2odsParameters: Array[String] = Array("single-job",
- "--name=test_source_for_quality_check", "--period=1440",
- "--local", s"--default-start-time=${firstDay}", "--env=test", "--once")
-
- val ods2dwdParameters: Array[String] = Array("single-job",
- "--name=quality_check", "--period=1440",
- "--local", s"--default-start-time=${firstDay}", "--env=test", "--once")
-
-
- val schema = List(
- StructField("order_id", IntegerType, true),
- StructField("phone", StringType, true),
- StructField("value", StringType, true),
- StructField("bz_time", TimestampType, true)
- )
-
- val firstDayData = Seq(
- Row(1, "110", "2333", getTimeStampFromStr("2021-10-07 17:12:59")), // normal
- Row(2, null, "aba aba", getTimeStampFromStr("2021-10-08 17:12:59")), // error
- Row(3, "110", "", getTimeStampFromStr("2021-10-08 17:12:59")) //warn
- )
-
- val firstDayDf = spark.createDataFrame(
- spark.sparkContext.parallelize(firstDayData),
- StructType(schema)
- )
-
- val dwdSchema = List(
- StructField("order_id", IntegerType, true),
- StructField("phone", StringType, true),
- StructField("value", StringType, true),
- StructField("bz_time", TimestampType, true)
- )
-
- val expectedData = Seq(
- Row(1, "110", "2333", getTimeStampFromStr("2021-10-07 17:12:59")), //normal
- //Row(2, null, "aba aba", getTimeStampFromStr("2021-10-08 17:12:59")), //error
- Row(3, "110", "", getTimeStampFromStr("2021-10-08 17:12:59")) //warn
- )
-
- val expectedDf = spark.createDataFrame(
- spark.sparkContext.parallelize(expectedData),
- StructType(dwdSchema)
- )
-
- val qualityCheckSchema = List(
- StructField("job_id", IntegerType, true),
- StructField("job_name", StringType, true),
- StructField("column", StringType, true),
- StructField("data_check_type", StringType, true),
- StructField("ids", IntegerType, true),
- StructField("error_type", StringType, true),
- StructField("warn_count", IntegerType, true),
- StructField("error_count", IntegerType, true)
- )
-
- val expectedQualityCheck = Seq(
- Row(2, "test_dwd_with_quality_check-20211001000000", "phone", "power null check(error)", 2, "error", 0, 1),
- Row(2, "test_dwd_with_quality_check-20211001000000", "value", "empty check(warn)", 3, "warn", 1, 0)
- )
-
- val expectedQualityCheckDf = spark.createDataFrame(
- spark.sparkContext.parallelize(expectedQualityCheck),
- StructType(qualityCheckSchema)
- )
-
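- // Rows failing "power null check" are errors and filtered from dwd; "empty check" hits are warn-only and flow through. Both are recorded in quality_check_log.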
- it("error data should not sink to dwd, warn & normal data could") {
- execute(createTableSql)
- writeDataToSource(firstDayDf, sourceTableName)
- //1. source => ods
- runJob(source2odsParameters)
- //2. ods => dwd
- runJob(ods2dwdParameters)
- val resultDf = readFromSource("test_ods_for_quality_check").drop("dt")
- assertSmallDataFrameEquality(resultDf, firstDayDf, orderedComparison = false)
-
- // 3. check data
- val dwdDf = spark.sql("select * from `643e9314`").drop("idempotent_key", "dw_insert_date", "effective_start_time", "effective_end_time", "is_active", "is_latest")
- assertSmallDataFrameEquality(dwdDf.drop("job_id"), expectedDf, orderedComparison = false)
- // 4. check the quality_check_log table
- val qualityCheckDf =
- spark.read.format("jdbc")
- .option("url", "jdbc:mysql://localhost:2333/sharp_etl")
- .option("user", "admin").option("password", "admin").option("dbtable", "quality_check_log")
- .load()
- .drop("id", "create_time", "last_update_time")
- val qualityCheckList =
- qualityCheckDf.select("job_name", "column", "data_check_type", "ids", "error_type", "warn_count", "error_count")
- .rdd.toLocalIterator.toSeq.map(_.toString())
- val expectedQualityCheckList =
- expectedQualityCheckDf.select("job_name", "column", "data_check_type", "ids", "error_type", "warn_count", "error_count")
- .rdd.toLocalIterator.toSeq.map(_.toString())
- qualityCheckList should contain theSameElementsAs expectedQualityCheckList
- }
-
-}
\ No newline at end of file
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/ETLSuit.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/ETLSuit.scala
deleted file mode 100644
index f3367d5..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/ETLSuit.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import com.github.sharpdata.sharpetl.core.syntax.Workflow
-import com.github.sharpdata.sharpetl.spark.cli.Command
-import com.github.sharpdata.sharpetl.spark.job.SparkSessionTestWrapper
-import com.github.sharpdata.sharpetl.spark.test.DataFrameComparer
-import com.github.sharpdata.sharpetl.core.util.DateUtil.YYYY_MM_DD_HH_MM_SS
-import org.apache.spark.sql.DataFrame
-import org.scalatest.BeforeAndAfterEach
-import org.scalatest.funspec.AnyFunSpec
-import org.scalatest.matchers.should
-import picocli.CommandLine
-
-import java.sql.{DriverManager, SQLException}
-import scala.util.control.NoStackTrace
-
-trait ETLSuit extends AnyFunSpec
- with should.Matchers
- with SparkSessionTestWrapper
- with DataFrameComparer
- with BeforeAndAfterEach {
-
- val createTableSql: String = ""
- val sourceDbName: String = "int_test"
- val targetDbName: String = "int_test"
- var logDbPort: Int = 2333
-
- val wf = workflow("workflowName")
-
- def workflow(name: String) = Workflow(name, "1440", "incremental", "timewindow", null, null, null, -1, null, false, null, Map(), Nil) // scalastyle:off
-
- def readFromLog(targetTable: String): DataFrame = {
- spark.read
- .format("jdbc")
- .option("url", s"jdbc:mysql://localhost:$logDbPort/sharp_etl")
- .option("dbtable", targetTable)
- .option("user", "admin")
- .option("password", "admin")
- .load()
- .drop("job_id", "job_time")
- }
-
- def executeInLog(sql: String, dbName: String): Boolean = {
- val url = s"jdbc:mysql://localhost:$logDbPort/$dbName"
- val connection = DriverManager.getConnection(url, "admin", "admin")
- val statement = connection.createStatement()
- try {
- statement.execute(sql)
- } catch {
- case ex: SQLException => throw new RuntimeException(ex)
- } finally {
- // close the statement before its connection
- if (statement != null) statement.close()
- if (connection != null) connection.close()
- }
- }
-
- def executeMigration(sql: String): Boolean = {
- executeInLog(sql, "sharp_etl")
- }
-
- def getTimeStampFromStr(str: String): java.sql.Timestamp = {
- import java.sql.Timestamp
- val parsedDate = YYYY_MM_DD_HH_MM_SS.parse(str)
- new Timestamp(parsedDate.getTime)
- }
-}
-
-object ETLSuit {
- private val errorHandler = new CommandLine.IExecutionExceptionHandler() {
- def handleExecutionException(ex: Exception, commandLine: CommandLine, parseResult: CommandLine.ParseResult): Int = {
- ex.printStackTrace()
- commandLine.getCommandSpec.exitCodeOnExecutionException
- }
- }
-
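- // Always appends "--release-resource=false" so shared resources (e.g. the SparkSession) survive across consecutive test runs.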
- def runJob(parameters: Array[String]): Unit = {
- val exitCode = new CommandLine(new Command()).setExecutionExceptionHandler(errorHandler).execute(
- parameters :+ "--release-resource=false": _*
- )
-
- if (exitCode != 0) {
- throw JobFailedException()
- }
- }
-}
-
-final case class JobFailedException() extends RuntimeException with NoStackTrace
\ No newline at end of file
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/IncrementalAutoIncIDModeSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/IncrementalAutoIncIDModeSpec.scala
deleted file mode 100644
index 5225e97..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/IncrementalAutoIncIDModeSpec.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import ETLSuit.runJob
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.MysqlSuit
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types._
-import org.scalatest.DoNotDiscover
-
-import java.util.UUID
-
-@DoNotDiscover
-class IncrementalAutoIncIDModeSpec extends MysqlSuit {
-
- override val createTableSql: String =
- "CREATE TABLE IF NOT EXISTS ods_inc_id_table" +
- " (id int, value varchar(255), job_id varchar(255), job_time varchar(255));"
-
- override val sourceDbName: String = "int_test"
- val sourceTableName: String = "test_inc_id_table"
- override val targetDbName: String = "int_test"
-
- val source2odsParametersFirstTime: Array[String] = Array("single-job",
- "--name=auto_inc_id_mode", "--period=1440",
- "--local", s"--default-start=9999", "--env=test", "--once")
-
- val source2odsParametersSecondTime: Array[String] = Array("single-job",
- "--name=auto_inc_id_mode", "--period=1440",
- "--local", "--env=test", "--once")
-
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("value", StringType, true)
- )
-
- val firstDayData = (0 until 100).map { idx =>
- Row(10000 + idx, UUID.randomUUID.toString)
- }
-
- val firstDayDf = spark.createDataFrame(
- spark.sparkContext.parallelize(firstDayData),
- StructType(schema)
- )
-
- val secondDayData = (0 until 100).map { idx =>
- Row(10100 + idx, UUID.randomUUID.toString)
- }
-
- val secondDayDf = spark.createDataFrame(
- spark.sparkContext.parallelize(secondDayData),
- StructType(schema)
- )
-
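- // Auto-inc-id mode tracks the max ingested id: the first run starts from --default-start=9999, the second resumes from the stored watermark.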
- it("incremental auto inc id") {
- execute(createTableSql)
- writeDataToSource(firstDayDf, sourceTableName)
- runJob(source2odsParametersFirstTime)
- writeDataToSource(secondDayDf, sourceTableName)
- runJob(source2odsParametersSecondTime)
- val dwdDf = readFromSource("ods_inc_id_table")
- dwdDf.count() should be(200)
- }
-
- it("refresh auto inc id") {
- val refresh: Array[String] = Array("single-job",
- "--name=auto_inc_id_mode", "--refresh",
- "--local", "--refresh-range-start=10000", "--refresh-range-end=10005", "--env=test", "--once")
- runJob(refresh)
- val dwdDf = readFromSource("ods_inc_id_table")
- dwdDf.count() should be(205)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/PropertyLoadingSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/PropertyLoadingSpec.scala
deleted file mode 100644
index 7ed4c15..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/PropertyLoadingSpec.scala
+++ /dev/null
@@ -1,100 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import com.github.sharpdata.sharpetl.core.util.{DateUtil, ETLConfig, WorkflowReader}
-import ETLSuit.runJob
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.{FixedMySQLContainer, MysqlSuit}
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types._
-import org.mockito.ArgumentMatchers.anyString
-import org.mockito.MockitoSugar.{when, withObjectMocked}
-import org.scalatest.{BeforeAndAfterAll, DoNotDiscover}
-
-import java.sql.Timestamp
-import java.time.LocalDateTime
-
-@DoNotDiscover
-class PropertyLoadingSpec extends MysqlSuit with BeforeAndAfterAll {
-
- val migrationMysql = new FixedMySQLContainer("mysql:5.7")
- val dataMysql = new FixedMySQLContainer("mysql:5.7")
-
- override protected def beforeEach(): Unit = {
- ETLConfig.reInitProperties()
- }
-
- override protected def beforeAll(): Unit = {
- ETLConfig.reInitProperties()
- migrationMysql.configurePort(logDbPort, "sharp_etl")
- migrationMysql.start()
-
- dataMysql.configurePort(dataPort, "int_test")
- dataMysql.start()
- execute(createTableSql)
- super.beforeAll()
- }
-
- override protected def afterAll(): Unit = {
- execute("truncate table target")
- execute("truncate table source")
- migrationMysql.stop()
- dataMysql.stop()
- super.afterAll()
- }
-
- override val createTableSql: String =
- "CREATE TABLE IF NOT EXISTS target" +
- " (id int, value varchar(255), bz_time timestamp, job_id varchar(255), job_time varchar(255));"
-
- override val sourceDbName: String = "int_test"
- val sourceTableName: String = "source"
- override val targetDbName: String = "int_test"
-
- val startTime = LocalDateTime.now().minusDays(1L).format(DateUtil.L_YYYY_MM_DD_HH_MM_SS)
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=source_to_target", "--period=1440",
- "--local", s"--default-start-time=${startTime}")
-
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("value", StringType, true),
- StructField("bz_time", TimestampType, true)
- )
-
- val time = Timestamp.valueOf(LocalDateTime.of(2021, 10, 1, 0, 0, 0))
-
- val data = Seq(
- Row(1, "111", time),
- Row(2, "222", time)
- )
-
- val sampleDataDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
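- // "--override" takes comma-separated key=value pairs; only the first '=' splits key from value, so values may themselves contain '=' (see balabala below).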
- it("should replace variable through command line parameter and throw exception") {
- writeDataToSource(sampleDataDf, sourceTableName)
- val jobParametersWithExtra = jobParameters ++ Array("--once", "--env=test", "--override=mysql.password=XXXX,foo=bar,balabala=a=b=c=d")
- withObjectMocked[WorkflowReader.type]{
- when(WorkflowReader.readWorkflow(anyString())).thenReturn(wf)
- runJob(jobParametersWithExtra)
- }
- assert(ETLConfig.getProperty("mysql.password") == "XXXX")
- assert(ETLConfig.getProperty("foo") == "bar")
- assert(ETLConfig.getProperty("balabala") == "a=b=c=d")
- }
-
- it("should read from local file system") {
- writeDataToSource(sampleDataDf, sourceTableName)
- val filePath = getClass.getResource("/application-test.properties").toString
- val jobParametersWithExtra = jobParameters ++ Array("--once", "--env=test", s"--property=$filePath")
- withObjectMocked[WorkflowReader.type]{
- when(WorkflowReader.readWorkflow(anyString())).thenReturn(wf)
- runJob(jobParametersWithExtra)
- }
- assert(ETLConfig.getProperty("from_file_path") == "true")
- assert(ETLConfig.getProperty("flyway.password") == "admin")
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SequentialExecutor.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SequentialExecutor.scala
deleted file mode 100644
index 5ce138f..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SequentialExecutor.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.MysqlSuitExecutor
-import com.github.sharpdata.sharpetl.spark.end2end.postgres.PostgresSuitExecutor
-import org.scalatest.Sequential
-
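-// The executors bind fixed host ports (2333 for logs, 2334 for data), so they must run one after another rather than in parallel.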
-class SequentialExecutor extends
- Sequential(new PropertyLoadingSpec, new PostgresSuitExecutor, new MysqlSuitExecutor)
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SkipRunningJobSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SkipRunningJobSpec.scala
deleted file mode 100644
index cddf0dd..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SkipRunningJobSpec.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.MysqlSuit
-import org.scalatest.DoNotDiscover
-
-
-/**
- * 1. create a running log in `job_log` table
- * 2. run task (skipRunning = true), assert exception thrown
- * 3. run task (skipRunning = false), no exception thrown
- */
-@DoNotDiscover
-class SkipRunningJobSpec extends MysqlSuit {
- override val createTableSql: String = ""
- override val targetDbName = "int_test"
- override val sourceDbName: String = "int_test"
-
- val firstDay = "2021-10-01 00:00:00"
-
- def jobParameters(jobName: String): Array[String] = Array("single-job",
- s"--name=$jobName", "--period=1440",
- "--local", s"--default-start-time=$firstDay", "--env=test", "--once")
-
- it("should kill running job when --skip-running=false") {
- // create a running log in `job_log` table
- executeMigration(
- """INSERT INTO job_log VALUES('uuid-job','do_nothing',1440,'do_nothing-20211001000000',20211001000000, 20211002000000,
- |'2021-10-30 19:08:47','2021-10-30 19:08:50','RUNNING','2021-10-30 19:08:47','2021-10-30 19:08:50','datetime', '', '', 'local-fake-app', '', '')"""
- .stripMargin)
- // run task (skipRunning = true), assert exception thrown
- assertThrows[JobFailedException] {
- runJob(jobParameters("do_nothing"))
- }
- // run task (skipRunning = false), no exception thrown
- runJob(jobParameters("do_nothing") :+ "--skip-running=false")
- }
-}
\ No newline at end of file
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/Source2TargetSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/Source2TargetSpec.scala
deleted file mode 100644
index e927c52..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/Source2TargetSpec.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import com.github.sharpdata.sharpetl.core.util.DateUtil
-import ETLSuit.runJob
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.MysqlSuit
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types._
-import org.scalatest.DoNotDiscover
-
-import java.sql.Timestamp
-import java.time.LocalDateTime
-
-@DoNotDiscover
-class Source2TargetSpec extends MysqlSuit {
-
- override val createTableSql: String =
- "CREATE TABLE IF NOT EXISTS target" +
- " (id int, value varchar(255), bz_time timestamp, job_id varchar(255), job_time varchar(255));"
-
- override val sourceDbName: String = "int_test"
- val sourceTableName: String = "source"
- override val targetDbName: String = "int_test"
-
- val startTime = LocalDateTime.now().minusDays(1L).format(DateUtil.L_YYYY_MM_DD_HH_MM_SS)
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=source_to_target", "--period=1440",
- "--local", s"--default-start-time=${startTime}", "--env=test")
-
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("value", StringType, true),
- StructField("bz_time", TimestampType, true)
- )
-
- val time = Timestamp.valueOf(LocalDateTime.of(2021, 10, 1, 0, 0, 0))
-
- val data = Seq(
- Row(1, "111", time),
- Row(2, "222", time)
- )
-
- val sampleDataDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
-
- it("simple source to target") {
- execute(createTableSql)
- writeDataToSource(sampleDataDf, sourceTableName)
- runJob(jobParameters)
- val resultDf = readFromSource("target")
- assertSmallDataFrameEquality(resultDf, sampleDataDf, orderedComparison = false)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SparkSessionIsolationSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SparkSessionIsolationSpec.scala
deleted file mode 100644
index 3d164f9..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/SparkSessionIsolationSpec.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import com.github.sharpdata.sharpetl.core.util.WorkflowReader
-import ETLSuit.runJob
-import org.scalatest.DoNotDiscover
-import org.scalatest.funspec.AnyFunSpec
-import org.scalatest.matchers.should
-
-@DoNotDiscover
-class SparkSessionIsolationSpec extends AnyFunSpec with should.Matchers {
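- // Steps may declare their own spark conf (here spark.sql.shuffle.partitions), which the runner is expected to apply per step rather than globally.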
- it("it should read spark conf correctly") {
- val steps = WorkflowReader.readWorkflow("session_isolation").steps
-
- steps.length should be(3)
-
- steps.head.conf.isEmpty should be(true)
- steps(1).conf should be(Map(
- "spark.sql.shuffle.partitions" -> "1"
- ))
- steps(2).conf should be(Map(
- "spark.sql.shuffle.partitions" -> "5"
- ))
- }
-
- it("works with sql config file") {
- val command = Array("single-job",
- "--name=session_isolation", "--period=1440",
- "--local", "--env=test", "--once")
-
- runJob(command)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/TaskDependenciesSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/TaskDependenciesSpec.scala
deleted file mode 100644
index 9f6bb75..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/TaskDependenciesSpec.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import com.github.sharpdata.sharpetl.core.util.WorkflowReader
-import ETLSuit.runJob
-import org.mockito.ArgumentMatchers.anyString
-import org.mockito.MockitoSugar.{when, withObjectMocked}
-import org.scalatest.DoNotDiscover
-
-/**
- * 1. declare dependencies (jobDependencyCheck depends on task-a and task-b)
- * 2. run jobDependencyCheck (fails: neither dependency has completed)
- * 3. run task-a (success)
- * 4. run jobDependencyCheck (fails: task-b still missing)
- * 5. run task-b (success)
- * 6. run jobDependencyCheck (success)
- */
-@DoNotDiscover
-class TaskDependenciesSpec extends ETLSuit {
- override val createTableSql: String = ""
- override val targetDbName = "int_test"
- override val sourceDbName: String = "int_test"
-
- def jobParameters(jobName: String): Array[String] = Array("single-job",
- s"--name=$jobName", "--period=1440",
- "--local", "--env=test", "--once")
-
-
- it("should respect to job dependencies") {
-
- // 2. run jobDependencyCheck (fails: neither dependency has completed)
- assertThrows[JobFailedException] {
- runJob(jobParameters("jobDependencyCheck"))
- }
-
- withObjectMocked[WorkflowReader.type] {
- when(WorkflowReader.readWorkflow(anyString())).thenReturn(workflow("task-a"))
- // 3. run task-a (success)
- runJob(jobParameters("task-a"))
- }
-
- // 4. run jobDependencyCheck (fails: task-b still missing)
- assertThrows[JobFailedException] {
- runJob(jobParameters("jobDependencyCheck"))
- }
-
- withObjectMocked[WorkflowReader.type] {
- when(WorkflowReader.readWorkflow(anyString())).thenReturn(workflow("task-b"))
- // 5. run task-b (success)
- runJob(jobParameters("task-b"))
- }
-
- // 6. run jobDependencyCheck (success)
- runJob(jobParameters("jobDependencyCheck"))
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/UDFSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/UDFSpec.scala
deleted file mode 100644
index 43ef43c..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/UDFSpec.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import org.scalatest.DoNotDiscover
-
-import scala.jdk.CollectionConverters._
-
-class TestUdfObj extends Serializable {
- def testUdf(value: String): String = {
- s"$value-proceed-by-udf"
- }
-}
-
-@DoNotDiscover
-class UDFSpec extends ETLSuit with Serializable {
-
- override val createTableSql: String = ""
- override val sourceDbName: String = "int_test"
- val sourceTableName: String = "test_delta_table"
- override val targetDbName: String = "int_test"
-
- val firstDay = "2021-10-01 00:00:00"
- val secondDay = "2021-10-02 00:00:00"
-
- val source2odsParameters: Array[String] = Array("single-job",
- "--name=udf_test", "--period=1440",
- "--local", s"--default-start-time=${firstDay}", "--env=test", "--once")
-
- it("should call udf") {
- runJob(source2odsParameters)
-
- spark.sql("select * from udf_result").collectAsList().asScala.head.get(0) should be("input-proceed-by-udf")
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaLakeSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaLakeSpec.scala
deleted file mode 100644
index 32ec537..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaLakeSpec.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.delta
-
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types._
-import org.scalatest.DoNotDiscover
-
-@DoNotDiscover
-class DeltaLakeSpec extends DeltaSuit {
-
- override val createTableSql: String = ""
- override val sourceDbName: String = "int_test"
- val sourceTableName: String = "test_delta_table"
- override val targetDbName: String = "int_test"
-
- val firstDay = "2021-10-01 00:00:00"
- val secondDay = "2021-10-02 00:00:00"
-
- val source2odsParameters: Array[String] = Array("single-job",
- "--name=test_auto_create_dim_source_delta", "--period=1440",
- "--local", s"--default-start-time=${firstDay}", "--env=test", "--once")
-
- val sourceSchema = List(
- StructField("id", StringType, true),
- StructField("bz_time", TimestampType, true)
- )
-
- val firstDayData = Seq(
- Row("1",
- getTimeStampFromStr("2022-02-02 17:12:59")
- )
- )
-
- lazy val firstDayDf = spark.createDataFrame(
- spark.sparkContext.parallelize(firstDayData),
- StructType(sourceSchema)
- )
-
- it("delta should works") {
- if (spark.version.startsWith("2.3")) {
- ETLLogger.error("Delta Lake does NOT support Spark 2.3.x")
- } else if (spark.version.startsWith("2.4") || spark.version.startsWith("3.0") || spark.version.startsWith("3.1")) {
- ETLLogger.error("Delta Lake does not works well on Spark 2.4.x, " +
- "CREATE TABLE USING delta is not supported by Spark before 3.0.0 and Delta Lake before 0.7.0.")
- } else {
- runJob(source2odsParameters)
-
- val result = spark.sql("select * from delta_db.test_fact")
- assertSmallDataFrameEquality(result, firstDayDf, orderedComparison = false)
- }
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaSuit.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaSuit.scala
deleted file mode 100644
index 174c5ba..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaSuit.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.delta
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import com.github.sharpdata.sharpetl.spark.extension.UdfInitializer
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql.SparkSession
-
-trait DeltaSuit extends ETLSuit {
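- // Local session wired for Delta Lake: the DeltaSparkSessionExtension and DeltaCatalog settings below enable "CREATE TABLE USING delta" on Spark 3.x.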
- override lazy val spark: SparkSession = {
- ETLSparkSession.local = true
- val session = SparkSession
- .builder()
- .master("local")
- .appName("spark session")
- .config("spark.sql.shuffle.partitions", "1")
- .config("spark.sql.legacy.timeParserPolicy", "LEGACY")
- .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
- .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")
- .config("spark.sql.sources.partitionOverwriteMode", "dynamic")
- .config("spark.sql.catalogImplementation", "hive")
- .getOrCreate()
- UdfInitializer.init(session)
- session
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaSuitExecutor.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaSuitExecutor.scala
deleted file mode 100644
index 154026b..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/DeltaSuitExecutor.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.delta
-
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.FixedMySQLContainer
-import org.scalatest.{BeforeAndAfterAll, DoNotDiscover, Sequential}
-
-@DoNotDiscover
-class DeltaSuitExecutor extends Sequential(
- new DeltaLakeSpec,
- new FlyDeltaSpec
-) with BeforeAndAfterAll {
- val logMysql = new FixedMySQLContainer("mysql:5.7")
-
- override protected def beforeAll(): Unit = {
- logMysql.configurePort(2333, "sharp_etl")
- logMysql.start()
- ETLConfig.reInitProperties()
- MyBatisSession.reloadFactory()
- super.beforeAll()
- }
-
- override protected def afterAll(): Unit = {
- logMysql.stop()
- super.afterAll()
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/FlyDeltaSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/FlyDeltaSpec.scala
deleted file mode 100644
index fb3ef5d..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/delta/FlyDeltaSpec.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.delta
-
-import com.github.sharpdata.sharpetl.core.util.ETLLogger
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import com.github.sharpdata.sharpetl.spark.test.DataFrameComparer
-import org.scalatest.funspec.AnyFunSpec
-import org.scalatest.matchers.should
-import org.scalatest.{BeforeAndAfterEach, DoNotDiscover}
-
-@DoNotDiscover
-class FlyDeltaSpec extends AnyFunSpec
- with should.Matchers
- with DeltaSuit
- with DataFrameComparer
- with BeforeAndAfterEach {
-
- it("should just run with delta") {
- if (spark.version.startsWith("2.3")) {
- ETLLogger.error("Delta Lake does NOT support Spark 2.3.x")
- } else if (spark.version.startsWith("2.4") || spark.version.startsWith("3.0") || spark.version.startsWith("3.1")) {
- ETLLogger.error("Delta Lake does not works well on Spark 2.4.x, " +
- "CREATE TABLE USING delta is not supported by Spark before 3.0.0 and Delta Lake before 0.7.0.")
- } else {
- val filePath = getClass.getResource("/application-delta.properties").toString
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=hello_delta",
- "--local", "--env=test", "--once", s"--property=$filePath")
-
- runJob(jobParameters)
- }
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/AutoCreateDimSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/AutoCreateDimSpec.scala
deleted file mode 100644
index 117b3ef..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/AutoCreateDimSpec.scala
+++ /dev/null
@@ -1,280 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.hive
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types._
-import org.scalatest.DoNotDiscover
-
-
-@DoNotDiscover
-class AutoCreateDimSpec extends HiveSuit {
-
- val productSchema = List(
- StructField("mid", StringType, true),
- StructField("name", StringType, true),
- StructField("product_version", StringType, true),
- StructField("product_status", StringType, true),
- StructField("start_time", TimestampType, true),
- StructField("end_time", TimestampType, true),
- StructField("is_latest", StringType, true),
- StructField("is_active", StringType, true),
- StructField("is_auto_created", StringType, true),
- StructField("year", StringType, true),
- StructField("month", StringType, true),
- StructField("day", StringType, true)
- )
-
- val classSchema = List(
- StructField("class_code", StringType, true),
- StructField("class_name", StringType, true),
- StructField("class_address", StringType, true),
- StructField("start_time", TimestampType, true),
- StructField("end_time", TimestampType, true),
- StructField("is_latest", StringType, true),
- StructField("is_active", StringType, true),
- StructField("is_auto_created", StringType, true),
- StructField("year", StringType, true),
- StructField("month", StringType, true),
- StructField("day", StringType, true)
- )
-
- val orderSchema = List(
- StructField("order_sn", StringType, true),
- StructField("product_id", StringType, true),
- StructField("user_id", StringType, true),
- StructField("class_id", StringType, true),
- StructField("product_count", StringType, true),
- StructField("price", DoubleType, true),
- StructField("discount", DoubleType, true),
- StructField("order_status", StringType, true),
- StructField("order_create_time", TimestampType, true),
- StructField("order_update_time", TimestampType, true),
- StructField("actual", StringType, true),
- StructField("job_id", StringType, true),
- StructField("start_time", TimestampType, true),
- StructField("end_time", TimestampType, true),
- StructField("is_latest", StringType, true),
- StructField("is_active", StringType, true),
- StructField("year", StringType, true),
- StructField("month", StringType, true),
- StructField("day", StringType, true)
- )
-
- val filePath = getClass.getResource("/application-test.properties").toString
-
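- // Dim rows referenced by the fact but absent from the dim tables are auto-created with is_auto_created = '1'; pre-seeded rows keep '0'.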
- it("[ALWAYS & ONCE & NEVER] create dim") {
- spark.sql("""create database if not exists ods""".stripMargin)
- spark.sql("""create database if not exists dim""".stripMargin)
- spark.sql("""create database if not exists dwd""".stripMargin)
- spark.sql(
- """
- |CREATE TABLE ods.t_order
- |(
- | order_id string,
- | order_sn string,
- | product_code string,
- | product_name string,
- | product_version string,
- | product_status string,
- | user_code string,
- | user_name string,
- | user_age int,
- | user_address string,
- | class_code string,
- | class_name string,
- | class_address string,
- | product_count int,
- | price double,
- | discount double,
- | order_status string,
- | order_create_time timestamp,
- | order_update_time timestamp
- |)
- | partitioned by (year string, month string, day string)""".stripMargin)
-
-
- spark.sql(
- """
- |CREATE TABLE dwd.t_fact_order
- |(
- | order_id string,
- | order_sn string,
- | product_id string,
- | user_id string,
- | class_id string,
- | product_count string,
- | price double,
- | discount double,
- | order_status string,
- | order_create_time timestamp,
- | order_update_time timestamp,
- | actual double,
- | job_id string,
- | start_time timestamp,
- | end_time timestamp,
- | is_latest string,
- | is_active string
- |)
- | partitioned by (year string, month string, day string)""".stripMargin)
-
- spark.sql(
- """
- |insert into ods.t_order partition (year = '2022', month = '03', day = '13')
- |values ("o_2022_03_13_01", "sn_o_2022_03_13_01", "p_001", "new product name", "v1", "good",
- | "user_code_01", "zhangsan", 18, "Mars",
- | "0708", "高三一班", "Mars",
- | 2, 23.33, 6.66, "created",
- | cast('2022-03-13 09:00:00' as timestamp), cast('2022-03-13 11:00:00' as timestamp)),
- | ("o_2022_03_13_01","sn_o_2022_03_13_01", "p_001", "old product name", "v1", "good",
- | "user_code_01", "zhangsan", 17, "Mars",
- | "0708", "高三一班", "Mars",
- | 2, 23.33, 6.66, "created",
- | cast('2022-03-13 09:00:00' as timestamp), cast('2022-03-13 09:00:00' as timestamp)),
- | ("o_2022_03_13_02", "sn_o_2022_03_13_02", "p_002", "product name2", "v1", "bad",
- | "user_code_02", "lisi", 17, "Mars",
- | "0709", "高三二班", "Mars",
- | 2, 23.33, 6.66, "created",
- | cast('2022-03-13 15:00:00' as timestamp), cast('2022-03-13 15:00:00' as timestamp)),
- | ("o_2022_03_13_04", "sn_o_2022_03_13_04", "p_003", "product name3", "v1", "bad",
- | "user_code_02", "lisi", 17, "Mars",
- | "0709", "高三二班", "Mars",
- | 2, 23.33, 6.66, "created",
- | cast('2022-03-13 09:00:00' as timestamp), cast('2022-03-13 15:00:00' as timestamp)),
- | ("o_2022_03_13_05", "sn_o_2022_03_13_05", "p_004", "new product name4", "v1", "bad",
- | "user_code_02", "wangwu", 19, "Mars",
- | "0709", "高三二班", "Mars",
- | 2, 23.33, 6.66, "created",
- | cast('2022-03-13 09:00:00' as timestamp), cast('2022-03-13 15:00:00' as timestamp))
- |""".stripMargin)
-
-
- spark.sql(
- """
- |CREATE TABLE dim.t_dim_class
- |(
- | class_id string,
- | class_code string,
- | class_name string,
- | class_address string,
- | job_id string,
- | start_time timestamp,
- | end_time timestamp,
- | is_latest string,
- | is_active string,
- | is_auto_created string
- |)
- | partitioned by (year string, month string, day string)
- |""".stripMargin)
-
- spark.sql(
- """
- |insert into dim.t_dim_class partition (year = '2022', month = '03', day = '13')
- |values ("123 123 123", "0708", "高三一班", "Moon",
- |'1', cast('2022-03-10 15:00:00' as timestamp), null, '1', '1', '0')
- |""".stripMargin)
-
-
- spark.sql(
- """
- |CREATE TABLE dim.t_dim_product
- |(
- | product_id string,
- | mid string,
- | name string,
- | product_version string,
- | product_status string,
- | job_id string,
- | start_time timestamp,
- | end_time timestamp,
- | is_latest string,
- | is_active string,
- | is_auto_created string
- |)
- | partitioned by (year string, month string, day string)
- |""".stripMargin)
-
- spark.sql(
- """
- |CREATE TABLE IF NOT EXISTS dim.t_dim_user
- |(
- | dim_user_id string,
- | user_info_code string,
- | user_name string,
- | user_age integer,
- | user_address string,
- | job_id string,
- | start_time timestamp,
- | end_time timestamp,
- | is_latest string,
- | is_active string,
- | is_auto_created string
- |)
- | partitioned by (year string, month string, day string)
- |""".stripMargin)
-
- spark.sql(
- """
- |insert into dim.t_dim_product partition (year = '2022', month = '03', day = '13')
- |values ("33333333333", "p_003", "product name3", "v1", "bad",
- |'1', cast('2022-03-13 09:00:00' as timestamp), null, '1', '1', '0'),
- |("44444444", "p_004", "product name4", "v1", "bad",
- |'1', cast('2022-03-13 09:00:00' as timestamp), null, '1', '1', '0')
- |""".stripMargin)
-
-
- runJob(Array("batch-job",
- "--names=auto_create_dim", "--period=1440",
- "--default-start=20220313000000", "--log-driven-type=timewindow",
- "--env=embedded-hive", "--once", s"--property=$filePath"))
-
- val productDf = spark.sql("""select * from dim.t_dim_product""".stripMargin)
- //productDf.show(100, truncate = false)
-
- val newCreatedData = Seq(
- Row("p_003", "product name3", "v1", "bad", getTimeStampFromStr("2022-03-13 09:00:00"),
- null, "1", "1", "0", "2022", "03", "13"
- ),
- Row("p_004", "product name4", "v1", "bad", getTimeStampFromStr("2022-03-13 09:00:00"),
- getTimeStampFromStr("2022-03-13 15:00:00"), "0", "0", "0", "2022", "03", "13"
- ),
- Row("p_001", "old product name", "v1", "good", getTimeStampFromStr("2022-03-13 09:00:00"),
- getTimeStampFromStr("2022-03-13 11:00:00"), "0", "0", "1", "2022", "03", "13"
- ),
- Row("p_001", "new product name", "v1", "good", getTimeStampFromStr("2022-03-13 11:00:00"),
- null, "1", "1", "1", "2022", "03", "13"
- ),
- Row("p_002", "product name2", "v1", "bad", getTimeStampFromStr("2022-03-13 15:00:00"),
- null, "1", "1", "1", "2022", "03", "13"
- ),
- Row("p_004", "new product name4", "v1", "bad", getTimeStampFromStr("2022-03-13 15:00:00"),
- null, "1", "1", "1", "2022", "03", "13"
- )
- )
-
- val productShouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(productSchema)
- )
-
- assertSmallDataFrameEquality(productDf.drop("product_id", "job_id"), productShouldBe, orderedComparison = false)
-
-
- val classDf = spark.sql("""select * from dim.t_dim_class""".stripMargin)
- //classDf.show(100, truncate = false)
-
- val classData = Seq(
- Row("0708", "高三一班", "Moon", getTimeStampFromStr("2022-03-10 15:00:00"),
- null, "1", "1", "0", "2022", "03", "13"
- ), Row("0709", "高三二班", "Mars", getTimeStampFromStr("2022-03-13 15:00:00"),
- null, "1", "1", "1", "2022", "03", "13"
- )
- )
-
- val classShouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(classData),
- StructType(classSchema)
- )
-
- assertSmallDataFrameEquality(classDf.drop("class_id", "job_id"), classShouldBe, orderedComparison = false)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/DimStudentModelingSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/DimStudentModelingSpec.scala
deleted file mode 100644
index df63237..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/DimStudentModelingSpec.scala
+++ /dev/null
@@ -1,184 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.hive
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import com.github.sharpdata.sharpetl.datasource.kafka.DFConversations._
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types.{StringType, StructField, StructType, TimestampType}
-import org.scalatest.DoNotDiscover
-
-
-@DoNotDiscover
-class DimStudentModelingSpec extends HiveSuit {
-
- val dwdSchema = List(
- StructField("student_code", StringType, true),
- StructField("student_name", StringType, true),
- StructField("student_age", StringType, true),
- StructField("student_address", StringType, true),
- StructField("student_create_time", TimestampType, true),
- StructField("student_update_time", TimestampType, true),
- StructField("start_time", TimestampType, true),
- StructField("end_time", TimestampType, true),
- StructField("is_latest", StringType, true),
- StructField("is_active", StringType, true),
- StructField("is_auto_created", StringType, true),
- StructField("year", StringType, true),
- StructField("month", StringType, true),
- StructField("day", StringType, true)
- )
-
- val filePath = getClass.getResource("/application-test.properties").toString
-
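- // dim.t_dim_student is SCD type 2: start_time/end_time bound each version, while is_latest/is_active mark the current row.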
- it("[DIM&INC] new data only") {
-
- spark.sql("""create database if not exists ods""".stripMargin)
- spark.sql("""create database if not exists dim""".stripMargin)
-
- spark.sql(
- """
- |CREATE TABLE ods.t_student
- |(
- | student_code string,
- | student_name string,
- | student_age string,
- | student_address string,
- | student_blabla string,
- | student_create_time timestamp,
- | student_update_time timestamp
- |)
- | partitioned by (year string, month string, day string)""".stripMargin)
-
- //new created user
- spark.sql(
- """
- |insert into ods.t_student partition (year = '2022', month = '03', day = '13')
- |values ("zhang san", "user name", "18", "user address", "blabala", cast('2022-03-13 10:00:00' as timestamp), cast('2022-03-13 15:00:00' as timestamp))
- |""".stripMargin)
-
- spark.sql(
- """
- |CREATE TABLE dim.t_dim_student
- |(
- | student_id string,
- | student_code string,
- | student_name string,
- | student_age string,
- | student_address string,
- | student_create_time timestamp,
- | student_update_time timestamp,
- | job_id string,
- | start_time timestamp,
- | end_time timestamp,
- | is_latest string,
- | is_active string,
- | is_auto_created string
- |)
- | partitioned by (year string, month string, day string)
- |""".stripMargin)
-
- runJob(Array("batch-job",
- "--names=dim_student", "--period=1440",
- "--default-start=20220313000000", "--log-driven-type=timewindow",
- "--env=embedded-hive", "--once", s"--property=$filePath"))
-
- val df = spark.sql("""select * from dim.t_dim_student""")
-
- val newCreatedData = Seq(
- Row("zhang san", "user name", "18", "user address", getTimeStampFromStr("2022-03-13 10:00:00"),
- getTimeStampFromStr("2022-03-13 15:00:00"), getTimeStampFromStr("2022-03-13 10:00:00"),
- null, "1", "1", "0", "2022", "03", "13"
- )
- )
-
- val shouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(dwdSchema)
- )
-
- assertSmallDataFrameEquality(df.drop("student_id", "job_id"), shouldBe)
- }
-
- it("[DIM&INC] new data with updated data") {
- spark.sql(
- """
- |insert into ods.t_student partition (year = '2022', month = '03', day = '14')
- |values ("zhang san", "new user name", "19", "new user address", "blabala", cast('2022-03-13 15:00:00' as timestamp), cast('2022-03-14 18:00:00' as timestamp)),
- | ("li si", "li si si li", "20", "lisi user address", "blabala", cast('2022-03-14 11:00:00' as timestamp), cast('2022-03-14 11:00:00' as timestamp))
- |""".stripMargin)
-
- runJob(Array("batch-job",
- "--names=dim_student", "--period=1440",
- "--default-start=20220313000000", "--log-driven-type=timewindow",
- "--env=embedded-hive", "--once", s"--property=$filePath"))
-
- val df = spark.sql("""select * from dim.t_dim_student""")
-
- val newCreatedData = Seq(
- Row("zhang san", "user name", "18", "user address", getTimeStampFromStr("2022-03-13 10:00:00"),
- getTimeStampFromStr("2022-03-13 15:00:00"), getTimeStampFromStr("2022-03-13 10:00:00"),
- getTimeStampFromStr("2022-03-14 18:00:00"), "0", "0", "0", "2022", "03", "13"
- ), Row("zhang san", "new user name", "19", "new user address", getTimeStampFromStr("2022-03-13 15:00:00"),
- getTimeStampFromStr("2022-03-14 18:00:00"), getTimeStampFromStr("2022-03-14 18:00:00"),
- null, "1", "1", "0", "2022", "03", "13"
- ), Row("li si", "li si si li", "20", "lisi user address", getTimeStampFromStr("2022-03-14 11:00:00"),
- getTimeStampFromStr("2022-03-14 11:00:00"), getTimeStampFromStr("2022-03-14 11:00:00"),
- null, "1", "1", "0", "2022", "03", "14"
- )
- )
-
- val shouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(dwdSchema)
- )
-
- assertSmallDataFrameEquality(df.drop("student_id", "job_id"), shouldBe, orderedComparison = false)
-
- assertSmallDataFrameEquality(spark.sql("select job_id from dim.t_dim_student where year = '2022' and month = '03' and day = '13' limit 1"),
- spark.sql("select job_id from dim.t_dim_student where year = '2022' and month = '03' and day = '14' limit 1"))
- }
-
- it("[DIM&INC] new data with updated data (should keep old partition unchanged)") {
- spark.sql(
- """
- |insert into ods.t_student partition (year = '2022', month = '03', day = '15')
- |values ("zhang san", "another new user name", "19", "new user address", "blabala", cast('2022-03-13 15:00:00' as timestamp), cast('2022-03-15 15:00:00' as timestamp)),
- | ("wang wu", "li si si li", "20", "lisi user address", "blabala", cast('2022-03-15 11:00:00' as timestamp), cast('2022-03-15 11:00:00' as timestamp))
- |""".stripMargin)
-
- runJob(Array("batch-job",
- "--names=dim_student", "--period=1440",
- "--default-start=20220313000000", "--log-driven-type=timewindow",
- "--env=embedded-hive", "--once", s"--property=$filePath"))
-
- val df = spark.sql("""select * from dim.t_dim_student""")
-
- val newCreatedData = Seq(
- Row("zhang san", "user name", "18", "user address", getTimeStampFromStr("2022-03-13 10:00:00"),
- getTimeStampFromStr("2022-03-13 15:00:00"), getTimeStampFromStr("2022-03-13 10:00:00"),
- getTimeStampFromStr("2022-03-14 18:00:00"), "0", "0", "0", "2022", "03", "13"
- ), Row("zhang san", "new user name", "19", "new user address", getTimeStampFromStr("2022-03-13 15:00:00"),
- getTimeStampFromStr("2022-03-14 18:00:00"), getTimeStampFromStr("2022-03-14 18:00:00"),
- getTimeStampFromStr("2022-03-15 15:00:00"), "0", "0", "0", "2022", "03", "13"
- ), Row("zhang san", "another new user name", "19", "new user address", getTimeStampFromStr("2022-03-13 15:00:00"),
- getTimeStampFromStr("2022-03-15 15:00:00"), getTimeStampFromStr("2022-03-15 15:00:00"),
- null, "1", "1", "0", "2022", "03", "13"
- ), Row("li si", "li si si li", "20", "lisi user address", getTimeStampFromStr("2022-03-14 11:00:00"),
- getTimeStampFromStr("2022-03-14 11:00:00"), getTimeStampFromStr("2022-03-14 11:00:00"),
- null, "1", "1", "0", "2022", "03", "14"
- ), Row("wang wu", "li si si li", "20", "lisi user address", getTimeStampFromStr("2022-03-15 11:00:00"),
- getTimeStampFromStr("2022-03-15 11:00:00"), getTimeStampFromStr("2022-03-15 11:00:00"),
- null, "1", "1", "0", "2022", "03", "15"
- )
- )
-
- val shouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(dwdSchema)
- )
-
- assertSmallDataFrameEquality(df.drop("student_id", "job_id"), shouldBe, orderedComparison = false)
-
- assertSmallDataFrameEquality(spark.sql("select job_id from dim.t_dim_student where year = '2022' and month = '03' and day = '13' limit 1"),
- spark.sql("select job_id from dim.t_dim_student where year = '2022' and month = '03' and day = '15' limit 1"))
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/FactEventModelingSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/FactEventModelingSpec.scala
deleted file mode 100644
index 39e11eb..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/FactEventModelingSpec.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.hive
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types.{StringType, StructField, StructType, TimestampType}
-import org.scalatest.DoNotDiscover
-
-
-@DoNotDiscover
-class FactEventModelingSpec extends HiveSuit {
-
- val dwdSchema = List(
- StructField("event_id", StringType, true),
- StructField("event_status", StringType, true),
- StructField("create_time", TimestampType, true),
- StructField("update_time", TimestampType, true),
- StructField("year", StringType, true),
- StructField("month", StringType, true),
- StructField("day", StringType, true)
- )
-
- val filePath = getClass.getResource("/application-test.properties").toString
-
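- // No SCD on the fact: a re-arriving event (00001) updates its existing row in the original partition instead of creating a new version.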
- it("Fact event, incremental & no SCD") {
-
- spark.sql("""create database if not exists ods""".stripMargin)
- spark.sql("""create database if not exists dwd""".stripMargin)
- spark.sql("""create database if not exists dim""".stripMargin)
-
- spark.sql(
- """
- |CREATE TABLE ods.t_event
- |(
- | event_id string,
- | device_IMEI string,
- | device_model string,
- | device_version string,
- | device_language string,
- | event_status string,
- | create_time timestamp,
- | update_time timestamp,
- | job_id string
- |)
- |partitioned by (year string, month string, day string)""".stripMargin)
-
- spark.sql(
- """
- |CREATE TABLE dwd.t_fact_event
- |(
- | event_id string,
- | device_id string,
- | event_status string,
- | create_time timestamp,
- | update_time timestamp,
- | job_id string
- |)
- | partitioned by (year string, month string, day string)""".stripMargin)
-
- spark.sql(
- """
- |insert into dwd.t_fact_event partition (year = '2022', month = '03', day = '11')
- |values
- |("00001", "-1", "ONLINE", cast('2022-03-11 10:00:00' as timestamp), cast('2022-03-11 15:00:00' as timestamp), '1')
- |""".stripMargin)
-
- spark.sql(
- """
- |insert into dwd.t_fact_event partition (year = '2022', month = '03', day = '12')
- |values
- |("00002", "-1", "ONLINE", cast('2022-03-12 10:00:00' as timestamp), cast('2022-03-12 15:00:00' as timestamp), '1')
- |""".stripMargin)
-
- spark.sql(
- """
- |insert into ods.t_event partition (year = '2022', month = '03', day = '13')
- |values
- |("00001", "111", "iPhone14","16", "CHN", "ONLINE", cast('2022-03-11 10:00:00' as timestamp), cast('2022-03-13 15:00:00' as timestamp), '1'),
- |("00003", "333", "iPhone14 pro max", "18", "CHN", "ONLINE", cast('2022-03-13 10:00:00' as timestamp), cast('2022-03-13 15:00:00' as timestamp), '1')
- |""".stripMargin)
-
- spark.sql(
- """
- |CREATE TABLE dim.t_dim_device
- |(
- | device_id string,
- | device_imei string,
- | device_model string,
- | device_version string,
- | device_language string,
- | create_time timestamp,
- | update_time timestamp,
- | job_id string,
- | start_time timestamp,
- | end_time timestamp,
- | is_latest string,
- | is_active string,
- | is_auto_created string
- |)
- |partitioned by (year string, month string, day string)
- |""".stripMargin)
-
- runJob(Array("batch-job",
- "--names=fact_event", "--period=1440",
- "--default-start=20220313000000", "--log-driven-type=timewindow",
- "--env=embedded-hive", "--once", s"--property=$filePath"))
-
- val df = spark.sql("""select * from dwd.t_fact_event""")
-
- val data = Seq(
- Row("00001", "ONLINE", getTimeStampFromStr("2022-03-11 10:00:00"),
- getTimeStampFromStr("2022-03-13 15:00:00"), "2022", "03", "11"),
- Row("00002", "ONLINE", getTimeStampFromStr("2022-03-12 10:00:00"),
- getTimeStampFromStr("2022-03-12 15:00:00"), "2022", "03", "12"),
- Row("00003", "ONLINE", getTimeStampFromStr("2022-03-13 10:00:00"),
- getTimeStampFromStr("2022-03-13 15:00:00"), "2022", "03", "13")
- )
-
- val shouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(dwdSchema)
- )
-
- assertSmallDataFrameEquality(df.drop("device_id", "job_id"), shouldBe)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/HiveSuit.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/HiveSuit.scala
deleted file mode 100644
index e6bb7b4..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/HiveSuit.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.hive
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import com.github.sharpdata.sharpetl.spark.utils.EmbeddedHive.sparkWithEmbeddedHive
-import org.apache.spark.sql.SparkSession
-
-trait HiveSuit extends ETLSuit {
- override lazy val spark: SparkSession = sparkWithEmbeddedHive
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/HiveSuitExecutor.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/HiveSuitExecutor.scala
deleted file mode 100644
index aa8fc39..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/hive/HiveSuitExecutor.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.hive
-
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.FixedMySQLContainer
-import org.scalatest.{BeforeAndAfterAll, DoNotDiscover, Sequential}
-
-@DoNotDiscover
-class HiveSuitExecutor extends Sequential(
- new DimStudentModelingSpec,
- new AutoCreateDimSpec,
- new FactEventModelingSpec
-) with BeforeAndAfterAll {
- val logMysql = new FixedMySQLContainer("mysql:5.7")
-
- override protected def beforeAll(): Unit = {
- logMysql.configurePort(2333, "sharp_etl")
- logMysql.start()
- ETLConfig.reInitProperties()
- MyBatisSession.reloadFactory()
- super.beforeAll()
- }
-
- override protected def afterAll(): Unit = {
- logMysql.stop()
- super.afterAll()
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/mysql/MyqlSuit.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/mysql/MyqlSuit.scala
deleted file mode 100644
index cff0e5c..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/mysql/MyqlSuit.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.mysql
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import org.apache.spark.sql.DataFrame
-
-import java.sql.{DriverManager, SQLException}
-
-trait MysqlSuit extends ETLSuit {
- var dataPort: Int = 2334
-
- def writeDataToSource(sampleDataDf: DataFrame, tableName: String): Unit = {
- sampleDataDf.write
- .format("jdbc")
- .option("url", s"jdbc:mysql://localhost:$dataPort/$sourceDbName")
- .option("dbtable", tableName)
- .option("user", "admin")
- .option("password", "admin")
- .mode("append")
- .save()
- }
-
- def readFromSource(targetTable: String): DataFrame = {
- spark.read
- .format("jdbc")
- .option("url", s"jdbc:mysql://localhost:$dataPort/int_test")
- .option("dbtable", targetTable)
- .option("user", "admin")
- .option("password", "admin")
- .load()
- .drop("job_id", "job_time")
- }
-
- def executeInSource(sql: String, dbName: String): Boolean = {
- val url = s"jdbc:mysql://localhost:$dataPort/$dbName"
- val connection = DriverManager.getConnection(url, "admin", "admin")
- val statement = connection.createStatement()
- try {
- statement.execute(sql)
- } catch {
- case ex: SQLException => throw new RuntimeException(ex)
- } finally {
- // close the statement before its connection
- if (statement != null) statement.close()
- if (connection != null) connection.close()
- }
- }
-
- def execute(sql: String): Boolean = {
- executeInSource(sql, targetDbName)
- }
-}
\ No newline at end of file
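The finally block above is corrected to close the statement before the connection; on Scala 2.13 the same pattern can be expressed with automatic, correctly ordered cleanup. A minimal sketch, assuming only the standard library and the same admin/admin credentials used by the suite:

    import java.sql.DriverManager
    import scala.util.Using

    // Sketch: execute one statement and release both resources in reverse
    // order of acquisition (statement first, then connection).
    def executeOnce(url: String, sql: String): Boolean =
      Using.resource(DriverManager.getConnection(url, "admin", "admin")) { connection =>
        Using.resource(connection.createStatement()) { statement =>
          statement.execute(sql)
        }
      }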
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/mysql/MysqlSuitExecutor.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/mysql/MysqlSuitExecutor.scala
deleted file mode 100644
index 8f3d3b9..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/mysql/MysqlSuitExecutor.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.mysql
-
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-import com.github.sharpdata.sharpetl.spark.datasource.HttpDataSourceSpec
-import com.github.sharpdata.sharpetl.spark.end2end._
-import com.github.sharpdata.sharpetl.spark.transformation._
-import org.scalatest.{BeforeAndAfterAll, DoNotDiscover, Suites}
-import org.testcontainers.containers.MySQLContainer
-
-// All suites inside Suites run in parallel; use Sequential if they must run in order
-@DoNotDiscover
-class MysqlSuitExecutor extends Suites(
- new DailyJobsSummaryReportTransformSpec,
- new HttpDataSourceSpec,
- new DynamicLoadingTransformerSpec,
- new Source2TargetSpec,
- new BatchJobSpec,
- new TaskDependenciesSpec,
- new SkipRunningJobSpec,
- new IncrementalAutoIncIDModeSpec,
- new JdbcTransformerSpec,
- new SparkSessionIsolationSpec,
- new ReplaceTemplateTableNameSpec,
- new DataQualityCheckSpec,
- new UDFSpec
-) with BeforeAndAfterAll {
-
- val migrationMysql = new FixedMySQLContainer("mysql:5.7")
- val dataMysql = new FixedMySQLContainer("mysql:5.7")
-
- override protected def beforeAll(): Unit = {
- migrationMysql.configurePort(2333, "sharp_etl")
- migrationMysql.start()
-
- dataMysql.configurePort(2334, "int_test")
- dataMysql.start()
-
- ETLConfig.reInitProperties()
- MyBatisSession.reloadFactory()
-
- super.beforeAll()
- }
-
- override protected def afterAll(): Unit = {
- migrationMysql.stop()
- dataMysql.stop()
- super.afterAll()
- }
-}
-
-class FixedMySQLContainer(val dockerImageName: String) extends MySQLContainer(dockerImageName) {
- def configurePort(port: Int, dbName: String): FixedMySQLContainer = {
- super.addFixedExposedPort(port, 3306)
- super.withEnv("MYSQL_ROOT_PASSWORD", "root")
- //super.withEnv("TZ", "Asia/Shanghai")
- super.withUsername("admin")
- super.withPassword("admin")
- super.withDatabaseName(dbName)
- super.withReuse(true)
- this
- }
-}
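For context, the fixed-port pattern above exists so that job properties can point at a stable JDBC URL across test runs. A hedged usage sketch (class and method names are those of the deleted FixedMySQLContainer):

    // Sketch: bind the metadata database to a known host port before tests run.
    val metadataDb = new FixedMySQLContainer("mysql:5.7").configurePort(2333, "sharp_etl")
    metadataDb.start()
    try {
      // tests may now connect to jdbc:mysql://localhost:2333/sharp_etl
    } finally {
      metadataDb.stop()
    }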
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/FixedPostgresContainer.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/FixedPostgresContainer.scala
deleted file mode 100644
index 0d75e5b..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/FixedPostgresContainer.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.postgres
-
-import org.testcontainers.containers.PostgreSQLContainer
-
-class FixedPostgresContainer(val dockerImageName: String) extends PostgreSQLContainer(dockerImageName) {
- def configurePort(port: Int, dbName: String): FixedPostgresContainer = {
- super.addFixedExposedPort(port, 5432)
- super.withUsername("postgres")
- super.withPassword("postgres")
- super.withDatabaseName(dbName)
- this
- }
-}
\ No newline at end of file
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresModelingSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresModelingSpec.scala
deleted file mode 100644
index 8961445..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresModelingSpec.scala
+++ /dev/null
@@ -1,756 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.postgres
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit.runJob
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.functions.{col, when}
-import org.apache.spark.sql.types._
-import org.scalatest.DoNotDiscover
-
-
-@DoNotDiscover
-class PostgresModelingSpec extends PostgresEtlSuit {
-
- val odsOrderSchema = List(
- StructField("order_sn", StringType, true),
- StructField("product_code", StringType, true),
- StructField("product_name", StringType, true),
- StructField("product_version", StringType, true),
- StructField("product_status", StringType, true),
- StructField("user_code", StringType, true),
- StructField("user_name", StringType, true),
- StructField("user_age", IntegerType, true),
- StructField("user_address", StringType, true),
- StructField("product_count", IntegerType, true),
- StructField("price", DecimalType(10, 4), true),
- StructField("discount", DecimalType(10, 4), true),
- StructField("order_status", StringType, true),
- StructField("order_create_time", TimestampType, true),
- StructField("order_update_time", TimestampType, true)
- )
-
- val dwdOrderNoSCDSchema = List(
- StructField("order_sn", StringType, true),
- StructField("product_id", StringType, true),
- StructField("user_id", StringType, true),
- StructField("product_count", IntegerType, true),
- StructField("price", DecimalType(10, 4), true),
- StructField("discount", DecimalType(10, 4), true),
- StructField("order_status", StringType, true),
- StructField("order_create_time", TimestampType, true),
- StructField("order_update_time", TimestampType, true),
- StructField("actual", DecimalType(10, 4), true)
- )
-
- val dwdOrderSCDSchema = List(
- StructField("order_sn", StringType, true),
- StructField("product_id", StringType, true),
- StructField("user_id", StringType, true),
- StructField("product_count", IntegerType, true),
- StructField("price", DecimalType(10, 4), true),
- StructField("discount", DecimalType(10, 4), true),
- StructField("order_status", StringType, true),
- StructField("order_create_time", TimestampType, true),
- StructField("order_update_time", TimestampType, true),
- StructField("actual", DecimalType(10, 4), true),
- StructField("start_time", TimestampType, true),
- StructField("end_time", TimestampType, true),
- StructField("is_active", StringType, true),
- StructField("is_latest", StringType, true)
- )
-
- val filePath = getClass.getResource("/application-test.properties").toString
-
- it("source -> ods test") {
- val createSchemaSql =
- """
- |CREATE SCHEMA IF NOT EXISTS sales;
- |CREATE SCHEMA IF NOT EXISTS ods;""".stripMargin
- execute(createSchemaSql, "postgres", 5432)
-
- val createSourceTableSql =
- """
- |drop table if exists sales.order;
- |create table if not exists sales.order
- |(
- | order_sn varchar(128),
- | product_code varchar(128),
- | product_name varchar(128),
- | product_version varchar(128),
- | product_status varchar(128),
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | product_count int,
- | price decimal(10, 4),
- | discount decimal(10, 4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp
- |);""".stripMargin
- execute(createSourceTableSql, "postgres", 5432)
-
- val initSourceDataSql =
- """
- |insert into sales.order (order_sn, product_code, product_name, product_version, product_status, user_code, user_name, user_age, user_address, product_count, price, discount, order_status, order_create_time, order_update_time)
- |values ('AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '胜利街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00', '2022-04-08 10:00:00')
- | , ('BBB', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '胜利街道', 12, 10, 0.3, 1, '2022-04-04 10:00:00', '2022-04-08 10:00:00')
- | , ('DDD', 'p1', '华为', 'mate40-v2', '上架', 'u2', '李四', 32, '迎宾街道', 15, 200, 0.4, 1, '2022-04-08 09:00:00', '2022-04-08 10:00:00');""".stripMargin
- execute(initSourceDataSql, "postgres", 5432)
-
- val createOdsTableSql =
- """
- |drop table if exists ods.t_order;
- |create table if not exists ods.t_order
- |(
- | order_sn varchar(128),
- | product_code varchar(128),
- | product_name varchar(128),
- | product_version varchar(128),
- | product_status varchar(128),
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | product_count int,
- | price decimal(10, 4),
- | discount decimal(10, 4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | job_id varchar(128)
- |) ;""".stripMargin
- execute(createOdsTableSql, "postgres", 5432)
-
- runJob(Array("single-job",
- s"--name=source_to_ods", "--period=1440",
- s"--default-start-time=2022-04-08 00:00:00", "--once", "--local", s"--property=$filePath"))
-
- val odsDf = readTable("postgres", 5432, "ods.t_order")
- odsDf.show()
- val newCreatedData = Seq(
- Row("AAA", "p1", "华为", "mate40", "上架", "u1", "张三", 12, "胜利街道", 12, BigDecimal(20.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-08 10:00:00")),
- Row("BBB", "p1", "华为", "mate40", "上架", "u1", "张三", 12, "胜利街道", 12, BigDecimal(10.0000), BigDecimal(0.3000), "1", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-08 10:00:00")),
- Row("DDD", "p1", "华为", "mate40-v2", "上架", "u2", "李四", 32, "迎宾街道", 15, BigDecimal(200.0000), BigDecimal(0.4000), "1", getTimeStampFromStr("2022-04-08 09:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"))
- )
- val odsOrderShouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(odsOrderSchema)
- )
- odsOrderShouldBe.show()
-
- assertSmallDataFrameEquality(odsDf.drop("job_id"), odsOrderShouldBe, orderedComparison = false)
-
- }
-
- it("ods -> dwd full & no SCD") {
- val createSchemaSql =
- """
- |CREATE SCHEMA IF NOT EXISTS ods;
- |CREATE SCHEMA IF NOT EXISTS dwd;
- |create extension if not exists "uuid-ossp";""".stripMargin
- execute(createSchemaSql, "postgres", 5432)
-
- val createOdsTableSql =
- """
- |drop table if exists ods.t_order;
- |create table if not exists ods.t_order
- |(
- | order_sn varchar(128),
- | product_code varchar(128),
- | product_name varchar(128),
- | product_version varchar(128),
- | product_status varchar(128),
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | product_count int,
- | price decimal(10, 4),
- | discount decimal(10, 4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | job_id varchar(128)
- |) ;""".stripMargin
- execute(createOdsTableSql, "postgres", 5432)
-
- val initOdsSql =
- """
- |insert into ods.t_order (job_id, order_sn, product_code, product_name, product_version, product_status, user_code, user_name, user_age, user_address, product_count, price, discount, order_status, order_create_time, order_update_time) values
- | (1, 'AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00', '2022-04-08 10:00:00') -- normal update
- |,(1, 'BBB', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 30, 0.3, 1, '2022-04-04 11:00:00', '2022-04-08 09:00:00') -- late-arriving record: not applied, status stays 1
- |,(1, 'DDD', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 0.4, 1, '2022-04-04 12:00:00', '2022-04-08 12:00:00') -- new record
- |,(2, 'AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00', '2022-04-08 10:00:00') -- normal update
- |,(2, 'BBB', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 30, 0.3, 1, '2022-04-04 10:00:00', '2022-04-08 09:00:00') -- late-arriving record: not applied, status stays 1
- |,(2, 'DDD', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 0.4, 2, '2022-04-04 10:00:00', '2022-04-09 10:00:00') -- status update
- |,(2, 'EEE', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 1.4, 1, '2022-04-09 10:00:00', '2022-04-09 10:00:00'); -- new record
- execute(initOdsSql, "postgres", 5432)
-
- val createDwdTableSql =
- """
- |drop table if exists dwd.t_fact_order;
- |create table dwd.t_fact_order(
- | order_sn varchar(128),
- | product_id varchar(128),
- | user_id varchar(128),
- | product_count int,
- | price decimal(10,4),
- | discount decimal(10,4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | actual decimal(10,4)
- |);
- |
- |drop table if exists dwd.t_dim_product;
- |create table dwd.t_dim_product(
- | id varchar(128) default uuid_generate_v1(), -- SCD surrogate id
- | mid varchar(128),
- | name varchar(128),
- | version varchar(128),
- | status varchar(128),
- | create_time timestamp,
- | update_time timestamp,
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1),
- | is_auto_created varchar(1)
- |);
- |
- |drop table if exists dwd.t_dim_user;
- |create table dwd.t_dim_user(
- | user_id varchar(128) default uuid_generate_v1(), -- SCD surrogate id
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | create_time timestamp,
- | update_time timestamp,
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1),
- | is_auto_created varchar(1)
- |);""".stripMargin
- execute(createDwdTableSql, "postgres", 5432)
-
- val initDwdSql =
- """
- |insert into dwd.t_dim_product(id, mid, name, version, status, create_time, update_time, start_time, end_time, is_active, is_latest, is_auto_created) values
- | ('3abd0495-9abe-44a0-b95b-0e42aeadc807', 'p1', '华为', 'mate40', '上架', '2021-01-01 10:00:00', '2021-01-01 10:00:00', '2021-01-01 10:00:00', null, '1', '1', '0');
- |
- |insert into dwd.t_dim_user(user_id, user_code, user_name, user_age, user_address, create_time, update_time, start_time, end_time, is_active, is_latest, is_auto_created) values
- |('06347be1-f752-4228-8480-4528a2166e14', 'u1', '张三', 12, '胜利街道', '2020-01-01 10:00:00', '2020-01-01 10:00:00', '2020-01-01 10:00:00', null, '1', '1', '0');
- |
- |insert into dwd.t_fact_order(order_sn, product_id, user_id, product_count, price, discount, order_status, order_create_time, order_update_time, actual) values
- |('AAA', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 20, 0.3, 1, '2022-04-04 10:00:00', '2022-04-04 10:00:00', 19.7),
- |('BBB', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 10, 0.3, 2, '2022-04-04 11:00:00', '2022-04-08 10:00:00', 9.7),
- |('CCC', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 30, 0.3, 2, '2022-04-04 12:00:00', '2022-04-07 10:00:00', 29.7);
- |""".stripMargin
- execute(initDwdSql, "postgres", 5432)
-
- val truncateOdsLogSql =
- """
- |truncate table job_log;""".stripMargin
- executeMigration(truncateOdsLogSql)
-
- val truncateOdsLogStepSql =
- """
- |truncate table step_log;""".stripMargin
- executeMigration(truncateOdsLogStepSql)
-
- val initOdsLogSql =
- """
- |insert into job_log (job_id, workflow_name, `period`, job_name, data_range_start, data_range_end, status) values
- |(1, 'source_to_ods', 1440, 'source_to_ods-20220408000000', '20220408000000', '20220409000000', 'SUCCESS'),
- |(2, 'source_to_ods', 1440, 'source_to_ods-20220408000000', '20220408000000', '20220409000000', 'SUCCESS');
- |""".stripMargin
- executeMigration(initOdsLogSql)
-
- runJob(Array("single-job",
- s"--name=ods_to_dwd_full_no_sc", "--period=1440",
- "--local", s"--property=$filePath"))
-
- val dwdDf = readTable("postgres", 5432, "dwd.t_fact_order")
- dwdDf.show()
- val newCreatedData = Seq(
- Row("BBB", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(10.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 11:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), BigDecimal(9.7)),
- Row("AAA", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(20.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), BigDecimal(19.7)),
- Row("DDD", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(0.4000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), BigDecimal(199.6)),
- Row("EEE", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(1.4000), "1", getTimeStampFromStr("2022-04-09 10:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), BigDecimal(198.6))
- )
- val dwdOrderShouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(dwdOrderNoSCDSchema)
- )
- dwdOrderShouldBe.show()
-
- assertSmallDataFrameEquality(dwdDf.drop("job_id"), dwdOrderShouldBe, orderedComparison = false)
-
- }
-
- it("ods -> dwd incremental & no SCD") {
- val createSchemaSql =
- """
- |CREATE SCHEMA IF NOT EXISTS ods;
- |CREATE SCHEMA IF NOT EXISTS dwd;
- |create extension if not exists "uuid-ossp";""".stripMargin
- execute(createSchemaSql, "postgres", 5432)
-
- val createOdsTableSql =
- """
- |drop table if exists ods.t_order;
- |create table if not exists ods.t_order
- |(
- | order_sn varchar(128),
- | product_code varchar(128),
- | product_name varchar(128),
- | product_version varchar(128),
- | product_status varchar(128),
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | product_count int,
- | price decimal(10, 4),
- | discount decimal(10, 4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | job_id varchar(128)
- |) ;""".stripMargin
- execute(createOdsTableSql, "postgres", 5432)
-
- val initOdsSql =
- """
- |insert into ods.t_order (job_id, order_sn, product_code, product_name, product_version, product_status, user_code, user_name, user_age, user_address, product_count, price, discount, order_status, order_create_time, order_update_time) values
- | (1, 'AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00', '2022-04-08 10:00:00') -- normal update
- |,(1, 'BBB', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 30, 0.3, 1, '2022-04-04 11:00:00', '2022-04-08 09:00:00') -- late-arriving record: not applied, status stays 1
- |,(1, 'DDD', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 0.4, 1, '2022-04-04 12:00:00', '2022-04-08 12:00:00') -- new record
- |,(2, 'DDD', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 0.4, 2, '2022-04-04 10:00:00', '2022-04-09 10:00:00') -- status update
- |,(2, 'EEE', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 1.4, 1, '2022-04-09 10:00:00', '2022-04-09 10:00:00'); -- new record
- execute(initOdsSql, "postgres", 5432)
-
- val createDwdTableSql =
- """
- |drop table if exists dwd.t_fact_order;
- |create table dwd.t_fact_order(
- | order_sn varchar(128),
- | product_id varchar(128),
- | user_id varchar(128),
- | product_count int,
- | price decimal(10,4),
- | discount decimal(10,4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | actual decimal(10,4)
- |);
- |
- |drop table if exists dwd.t_dim_product;
- |create table dwd.t_dim_product(
- | id varchar(128) default uuid_generate_v1(), -- SCD surrogate id
- | mid varchar(128),
- | name varchar(128),
- | version varchar(128),
- | status varchar(128),
- | create_time timestamp,
- | update_time timestamp,
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1),
- | is_auto_created varchar(1)
- |);
- |
- |drop table if exists dwd.t_dim_user;
- |create table dwd.t_dim_user(
- | user_id varchar(128) default uuid_generate_v1(), -- SCD surrogate id
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | create_time timestamp,
- | update_time timestamp,
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1),
- | is_auto_created varchar(1)
- |);""".stripMargin
- execute(createDwdTableSql, "postgres", 5432)
-
- val initDwdSql =
- """
- |insert into dwd.t_dim_product(id, mid, name, version, status, create_time, update_time, start_time, end_time, is_active, is_latest, is_auto_created) values
- | ('3abd0495-9abe-44a0-b95b-0e42aeadc807', 'p1', '华为', 'mate40', '上架', '2021-01-01 10:00:00', '2021-01-01 10:00:00', '2021-01-01 10:00:00', null, '1', '1', '0');
- |
- |insert into dwd.t_dim_user(user_id, user_code, user_name, user_age, user_address, create_time, update_time, start_time, end_time, is_active, is_latest, is_auto_created) values
- |('06347be1-f752-4228-8480-4528a2166e14', 'u1', '张三', 12, '胜利街道', '2020-01-01 10:00:00', '2020-01-01 10:00:00', '2020-01-01 10:00:00', null, '1', '1', '0');
- |
- |insert into dwd.t_fact_order(order_sn, product_id, user_id, product_count, price, discount, order_status, order_create_time, order_update_time, actual) values
- |('AAA', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 20, 0.3, 1, '2022-04-04 10:00:00', '2022-04-04 10:00:00', 19.7),
- |('BBB', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 10, 0.3, 2, '2022-04-04 11:00:00', '2022-04-08 10:00:00', 9.7),
- |('CCC', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 30, 0.3, 2, '2022-04-04 12:00:00', '2022-04-07 10:00:00', 29.7);
- |""".stripMargin
- execute(initDwdSql, "postgres", 5432)
-
- val truncateOdsLogSql =
- """
- |truncate table job_log;""".stripMargin
- executeMigration(truncateOdsLogSql)
-
- val truncateOdsLogStepSql =
- """
- |truncate table step_log;""".stripMargin
- executeMigration(truncateOdsLogStepSql)
-
- val initOdsLogSql =
- """
- |insert into job_log (job_id, workflow_name, `period`, job_name, data_range_start, data_range_end, status) values
- |(1, 'source_to_ods', 1440, 'source_to_ods-20220408000000', '20220408000000', '20220409000000', 'SUCCESS'),
- |(2, 'source_to_ods', 1440, 'source_to_ods-20220408000000', '20220408000000', '20220409000000', 'SUCCESS');
- |""".stripMargin
- executeMigration(initOdsLogSql)
-
- runJob(Array("single-job",
- s"--name=ods_to_dwd_incremental_no_sc", "--period=1440",
- "--local", s"--property=$filePath"))
-
- val dwdDf = readTable("postgres", 5432, "dwd.t_fact_order")
- dwdDf.show()
- val newCreatedData = Seq(
- Row("BBB", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(10.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 11:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), BigDecimal(9.7)),
- Row("CCC", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(30.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 12:00:00"), getTimeStampFromStr("2022-04-07 10:00:00"), BigDecimal(29.7)),
- Row("AAA", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(20.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), BigDecimal(19.7)),
- Row("DDD", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(0.4000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), BigDecimal(199.6)),
- Row("EEE", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(1.4000), "1", getTimeStampFromStr("2022-04-09 10:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), BigDecimal(198.6))
- )
- val dwdOrderShouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(dwdOrderNoSCDSchema)
- )
- dwdOrderShouldBe.show()
-
- assertSmallDataFrameEquality(dwdDf.drop("job_id"), dwdOrderShouldBe, orderedComparison = false)
-
- }
- it("ods -> dwd full & SCD") {
- val createSchemaSql =
- """
- |CREATE SCHEMA IF NOT EXISTS ods;
- |CREATE SCHEMA IF NOT EXISTS dwd;
- |create extension if not exists "uuid-ossp";""".stripMargin
- execute(createSchemaSql, "postgres", 5432)
-
- val createOdsTableSql =
- """
- |drop table if exists ods.t_order;
- |create table if not exists ods.t_order
- |(
- | order_sn varchar(128),
- | product_code varchar(128),
- | product_name varchar(128),
- | product_version varchar(128),
- | product_status varchar(128),
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | product_count int,
- | price decimal(10, 4),
- | discount decimal(10, 4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | job_id varchar(128)
- |) ;""".stripMargin
- execute(createOdsTableSql, "postgres", 5432)
-
- val initOdsSql =
- """
- |insert into ods.t_order (job_id, order_sn, product_code, product_name, product_version, product_status, user_code, user_name, user_age, user_address, product_count, price, discount, order_status, order_create_time, order_update_time) values
- | (1, 'AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00', '2022-04-08 10:00:00') -- normal update
- |,(1, 'BBB', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 30, 0.3, 1, '2022-04-04 11:00:00', '2022-04-08 09:00:00') -- late-arriving record: not applied, status stays 1
- |,(1, 'DDD', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 0.4, 1, '2022-04-04 12:00:00', '2022-04-08 12:00:00') -- new record
- |,(2, 'AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00', '2022-04-08 10:00:00') -- normal update
- |,(2, 'BBB', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 30, 0.3, 1, '2022-04-04 10:00:00', '2022-04-08 09:00:00') -- late-arriving record: not applied, status stays 1
- |,(2, 'DDD', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 0.4, 2, '2022-04-04 10:00:00', '2022-04-09 10:00:00') -- status update
- |,(2, 'EEE', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 1.4, 1, '2022-04-09 10:00:00', '2022-04-09 10:00:00'); -- new record
- execute(initOdsSql, "postgres", 5432)
-
- val createDwdTableSql =
- """
- |create extension if not exists "uuid-ossp";
- |drop table if exists dwd.t_fact_order;
- |create table dwd.t_fact_order(
- | onedata_order_id varchar(128) default uuid_generate_v1(),
- | order_sn varchar(128),
- | product_id varchar(128),
- | user_id varchar(128),
- | product_count int,
- | price decimal(10,4),
- | discount decimal(10,4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | actual decimal(10,4),
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1)
- |);
- |
- |drop table if exists dwd.t_dim_product;
- |create table dwd.t_dim_product(
- | id varchar(128) default uuid_generate_v1(), -- SCD surrogate id
- | mid varchar(128),
- | name varchar(128),
- | version varchar(128),
- | status varchar(128),
- | create_time timestamp,
- | update_time timestamp,
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1),
- | is_auto_created varchar(1)
- |);
- |
- |drop table if exists dwd.t_dim_user;
- |create table dwd.t_dim_user(
- | user_id varchar(128) default uuid_generate_v1(), -- SCD surrogate id
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | create_time timestamp,
- | update_time timestamp,
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1),
- | is_auto_created varchar(1)
- |);""".stripMargin
- execute(createDwdTableSql, "postgres", 5432)
-
- val initDwdSql =
- """
- |insert into dwd.t_dim_product(id, mid, name, version, status, create_time, update_time, start_time, end_time, is_active, is_latest, is_auto_created) values
- | ('3abd0495-9abe-44a0-b95b-0e42aeadc807', 'p1', '华为', 'mate40', '上架', '2021-01-01 10:00:00', '2021-01-01 10:00:00', '2021-01-01 10:00:00', null, '1', '1', '0');
- |
- |insert into dwd.t_dim_user(user_id, user_code, user_name, user_age, user_address, create_time, update_time, start_time, end_time, is_active, is_latest, is_auto_created) values
- |('06347be1-f752-4228-8480-4528a2166e14', 'u1', '张三', 12, '胜利街道', '2020-01-01 10:00:00', '2020-01-01 10:00:00', '2020-01-01 10:00:00', null, '1', '1', '0');
- |
- |insert into dwd.t_fact_order(onedata_order_id, order_sn, product_id, user_id, product_count, price, discount, order_status, order_create_time, order_update_time, actual, start_time, end_time, is_active, is_latest) values
- |('3abd0495-9abe-44a0-b95b-0e42aeadc909', 'AAA', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 20, 0.3, 1, '2022-04-04 10:00:00', '2022-04-04 10:00:00', 19.7, '2022-04-04 10:00:00', null, '1', '1'),
- |('3abd0495-9abe-44a0-b95b-0e42aeadc919', 'BBB', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 10, 0.3, 2, '2022-04-04 11:00:00', '2022-04-08 10:00:00', 9.7, '2022-04-04 10:00:00', null, '1', '1'),
- |('3abd0495-9abe-44a0-b95b-0e42aeadc929', 'CCC', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 30, 0.3, 2, '2022-04-04 12:00:00', '2022-04-07 10:00:00', 29.7, '2022-04-04 10:00:00', null, '1', '1');
- |""".stripMargin
- execute(initDwdSql, "postgres", 5432)
-
- val truncateOdsLogSql =
- """
- |truncate table job_log;""".stripMargin
- executeMigration(truncateOdsLogSql)
-
- val truncateOdsLogStepSql =
- """
- |truncate table step_log;""".stripMargin
- executeMigration(truncateOdsLogStepSql)
-
- val initOdsLogSql =
- """
- |insert into job_log (job_id, workflow_name, `period`, job_name, data_range_start, data_range_end, status) values
- |(1, 'source_to_ods', 1440, 'source_to_ods-20220408000000', '20220408000000', '20220409000000', 'SUCCESS'),
- |(2, 'source_to_ods', 1440, 'source_to_ods-20220408000000', '20220408000000', '20220409000000', 'SUCCESS');
- |""".stripMargin
- executeMigration(initOdsLogSql)
-
- runJob(Array("single-job",
- s"--name=ods_to_dwd_full_sc", "--period=1440",
- "--local", s"--property=$filePath"))
-
- var dwdDf = readTable("postgres", 5432, "dwd.t_fact_order")
- dwdDf = dwdDf.withColumn("end_time", when(col("order_sn") === "CCC" && col("is_latest") === "0", getTimeStampFromStr("2999-09-09 00:00:00")).otherwise(col("end_time")))
- .withColumn("start_time", when(col("order_sn") === "CCC" && col("is_latest") === "1", getTimeStampFromStr("2999-09-09 00:00:00")).otherwise(col("start_time")))
- .withColumn("order_update_time", when(col("order_sn") === "CCC" && col("is_latest") === "1", getTimeStampFromStr("2999-09-09 00:00:00")).otherwise(col("order_update_time")))
- dwdDf.show()
-
- val newCreatedData = Seq(
- Row("BBB", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(10.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 11:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), BigDecimal(9.7), getTimeStampFromStr("2022-04-04 10:00:00"), null, "1", "1"),
- Row("CCC", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(30.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 12:00:00"), getTimeStampFromStr("2022-04-07 10:00:00"), BigDecimal(29.7), getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2999-09-09 00:00:00"), "1", "0"),
- Row("AAA", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(20.0000), BigDecimal(0.3000), "1", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-04 10:00:00"), BigDecimal(19.7), getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), "1", "0"),
- Row("CCC", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(30.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 12:00:00"), getTimeStampFromStr("2999-09-09 00:00:00"), BigDecimal(29.7), getTimeStampFromStr("2999-09-09 00:00:00"), null, "0", "1"),
- Row("AAA", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(20.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), BigDecimal(19.7), getTimeStampFromStr("2022-04-08 10:00:00"), null, "1", "1"),
- Row("DDD", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(0.4000), "1", getTimeStampFromStr("2022-04-04 12:00:00"), getTimeStampFromStr("2022-04-08 12:00:00"), BigDecimal(199.6), getTimeStampFromStr("2022-04-04 12:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), "1", "0"),
- Row("DDD", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(0.4000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), BigDecimal(199.6), getTimeStampFromStr("2022-04-09 10:00:00"), null, "1", "1"),
- Row("EEE", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(1.4000), "1", getTimeStampFromStr("2022-04-09 10:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), BigDecimal(198.6), getTimeStampFromStr("2022-04-09 10:00:00"), null, "1", "1")
- )
- val dwdOrderShouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(dwdOrderSCDSchema)
- )
- dwdOrderShouldBe.show()
-
- assertSmallDataFrameEquality(dwdDf.drop("job_id", "onedata_order_id"), dwdOrderShouldBe, orderedComparison = false)
-
- }
- it("ods -> dwd incremental & SCD") {
- val createSchemaSql =
- """
- |CREATE SCHEMA IF NOT EXISTS ods;
- |CREATE SCHEMA IF NOT EXISTS dwd;
- |create extension if not exists "uuid-ossp";""".stripMargin
- execute(createSchemaSql, "postgres", 5432)
-
- val createOdsTableSql =
- """
- |drop table if exists ods.t_order;
- |create table if not exists ods.t_order
- |(
- | order_sn varchar(128),
- | product_code varchar(128),
- | product_name varchar(128),
- | product_version varchar(128),
- | product_status varchar(128),
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | product_count int,
- | price decimal(10, 4),
- | discount decimal(10, 4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | job_id varchar(128)
- |) ;""".stripMargin
- execute(createOdsTableSql, "postgres", 5432)
-
- val initOdsSql =
- """
- |insert into ods.t_order (job_id, order_sn, product_code, product_name, product_version, product_status, user_code, user_name, user_age, user_address, product_count, price, discount, order_status, order_create_time, order_update_time) values
- | (1, 'AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00', '2022-04-08 10:00:00') -- normal update
- |,(1, 'BBB', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 30, 0.3, 1, '2022-04-04 11:00:00', '2022-04-08 09:00:00') -- late-arriving record: not applied, status stays 1
- |,(1, 'DDD', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 0.4, 1, '2022-04-04 12:00:00', '2022-04-08 12:00:00') -- new record
- |,(2, 'DDD', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 0.4, 2, '2022-04-04 10:00:00', '2022-04-09 10:00:00') -- status update
- |,(2, 'EEE', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '迎宾街道', 12, 200, 1.4, 1, '2022-04-09 10:00:00', '2022-04-09 10:00:00'); -- new record
- execute(initOdsSql, "postgres", 5432)
-
- val createDwdTableSql =
- """
- |create extension if not exists "uuid-ossp";
- |drop table if exists dwd.t_fact_order;
- |create table dwd.t_fact_order(
- | onedata_order_id varchar(128) default uuid_generate_v1(),
- | order_sn varchar(128),
- | product_id varchar(128),
- | user_id varchar(128),
- | product_count int,
- | price decimal(10,4),
- | discount decimal(10,4),
- | order_status varchar(128),
- | order_create_time timestamp,
- | order_update_time timestamp,
- | actual decimal(10,4),
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1)
- |);
- |
- |drop table if exists dwd.t_dim_product;
- |create table dwd.t_dim_product(
- | id varchar(128) default uuid_generate_v1(), -- SCD surrogate id
- | mid varchar(128),
- | name varchar(128),
- | version varchar(128),
- | status varchar(128),
- | create_time timestamp,
- | update_time timestamp,
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1),
- | is_auto_created varchar(1)
- |);
- |
- |drop table if exists dwd.t_dim_user;
- |create table dwd.t_dim_user(
- | user_id varchar(128) default uuid_generate_v1(), -- SCD surrogate id
- | user_code varchar(128),
- | user_name varchar(128),
- | user_age int,
- | user_address varchar(128),
- | create_time timestamp,
- | update_time timestamp,
- | start_time timestamp,
- | end_time timestamp,
- | is_active varchar(1),
- | is_latest varchar(1),
- | is_auto_created varchar(1)
- |);""".stripMargin
- execute(createDwdTableSql, "postgres", 5432)
-
- val initDwdSql =
- """
- |insert into dwd.t_dim_product(id, mid, name, version, status, create_time, update_time, start_time, end_time, is_active, is_latest, is_auto_created) values
- | ('3abd0495-9abe-44a0-b95b-0e42aeadc807', 'p1', '华为', 'mate40', '上架', '2021-01-01 10:00:00', '2021-01-01 10:00:00', '2021-01-01 10:00:00', null, '1', '1', '0');
- |
- |insert into dwd.t_dim_user(user_id, user_code, user_name, user_age, user_address, create_time, update_time, start_time, end_time, is_active, is_latest, is_auto_created) values
- |('06347be1-f752-4228-8480-4528a2166e14', 'u1', '张三', 12, '胜利街道', '2020-01-01 10:00:00', '2020-01-01 10:00:00', '2020-01-01 10:00:00', null, '1', '1', '0');
- |
- |insert into dwd.t_fact_order(onedata_order_id, order_sn, product_id, user_id, product_count, price, discount, order_status, order_create_time, order_update_time, actual, start_time, end_time, is_active, is_latest) values
- |('3abd0495-9abe-44a0-b95b-0e42aeadc909', 'AAA', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 20, 0.3, 1, '2022-04-04 10:00:00', '2022-04-04 10:00:00', 19.7, '2022-04-04 10:00:00', null, '1', '1'),
- |('3abd0495-9abe-44a0-b95b-0e42aeadc919', 'BBB', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 10, 0.3, 2, '2022-04-04 11:00:00', '2022-04-08 10:00:00', 9.7, '2022-04-04 10:00:00', null, '1', '1'),
- |('3abd0495-9abe-44a0-b95b-0e42aeadc929', 'CCC', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 30, 0.3, 2, '2022-04-04 12:00:00', '2022-04-07 10:00:00', 29.7, '2022-04-04 10:00:00', null, '1', '1');
- |""".stripMargin
- execute(initDwdSql, "postgres", 5432)
-
- val truncateOdsLogSql =
- """
- |truncate table job_log;""".stripMargin
- executeMigration(truncateOdsLogSql)
-
- val truncateOdsLogStepSql =
- """
- |truncate table step_log;""".stripMargin
- executeMigration(truncateOdsLogStepSql)
-
- val initOdsLogSql =
- """
- |insert into job_log (job_id, workflow_name, `period`, job_name, data_range_start, data_range_end, status) values
- |(1, 'source_to_ods', 1440, 'source_to_ods-20220408000000', '20220408000000', '20220409000000', 'SUCCESS'),
- |(2, 'source_to_ods', 1440, 'source_to_ods-20220408000000', '20220408000000', '20220409000000', 'SUCCESS');
- |""".stripMargin
- executeMigration(initOdsLogSql)
-
- runJob(Array("single-job",
- s"--name=ods_to_dwd_incremental_sc", "--period=1440",
- "--local", s"--property=$filePath"))
-
- val dwdDf = readTable("postgres", 5432, "dwd.t_fact_order")
- dwdDf.show()
-
- val newCreatedData = Seq(
- Row("BBB", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(10.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 11:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), BigDecimal(9.7), getTimeStampFromStr("2022-04-04 10:00:00"), null, "1", "1"),
- Row("CCC", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(30.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 12:00:00"), getTimeStampFromStr("2022-04-07 10:00:00"), BigDecimal(29.7), getTimeStampFromStr("2022-04-04 10:00:00"), null, "1", "1"),
- Row("AAA", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(20.0000), BigDecimal(0.3000), "1", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-04 10:00:00"), BigDecimal(19.7), getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), "1", "0"),
- Row("AAA", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(20.0000), BigDecimal(0.3000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-08 10:00:00"), BigDecimal(19.7), getTimeStampFromStr("2022-04-08 10:00:00"), null, "1", "1"),
- Row("DDD", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(0.4000), "1", getTimeStampFromStr("2022-04-04 12:00:00"), getTimeStampFromStr("2022-04-08 12:00:00"), BigDecimal(199.6), getTimeStampFromStr("2022-04-04 12:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), "1", "0"),
- Row("DDD", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(0.4000), "2", getTimeStampFromStr("2022-04-04 10:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), BigDecimal(199.6), getTimeStampFromStr("2022-04-09 10:00:00"), null, "1", "1"),
- Row("EEE", "3abd0495-9abe-44a0-b95b-0e42aeadc807", "06347be1-f752-4228-8480-4528a2166e14", 12, BigDecimal(200.0000), BigDecimal(1.4000), "1", getTimeStampFromStr("2022-04-09 10:00:00"), getTimeStampFromStr("2022-04-09 10:00:00"), BigDecimal(198.6), getTimeStampFromStr("2022-04-09 10:00:00"), null, "1", "1")
- )
- val dwdOrderShouldBe = spark.createDataFrame(
- spark.sparkContext.parallelize(newCreatedData),
- StructType(dwdOrderSCDSchema)
- )
- dwdOrderShouldBe.show()
-
- assertSmallDataFrameEquality(dwdDf.drop("job_id", "onedata_order_id"), dwdOrderShouldBe, orderedComparison = false)
-
- }
-}
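The SCD expectations above encode the usual type-2 transition: the previously latest row is closed (end_time set, is_latest flipped to '0') and a new active version is inserted. A minimal sketch of that transition for order DDD, written in the same embedded-SQL-string style as the fixtures (illustrative only, not the framework's generated statements):

    val scd2Transition =
      """
        |-- close the previously latest version
        |update dwd.t_fact_order
        |set end_time = '2022-04-09 10:00:00', is_latest = '0'
        |where order_sn = 'DDD' and is_latest = '1';
        |-- insert the new active version
        |insert into dwd.t_fact_order(order_sn, order_status, start_time, end_time, is_active, is_latest)
        |values ('DDD', '2', '2022-04-09 10:00:00', null, '1', '1');
        |""".stripMargin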
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresSuit.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresSuit.scala
deleted file mode 100644
index 4e6eac3..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresSuit.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.postgres
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import org.apache.spark.sql.DataFrame
-
-import java.sql.{DriverManager, SQLException}
-
-trait PostgresEtlSuit extends ETLSuit {
-
- def readTable(dbName: String, port: Int, tableName: String): DataFrame = {
- spark.read
- .format("jdbc")
- .option("url", s"jdbc:postgresql://localhost:$port/$dbName")
- .option("dbtable", tableName)
- .option("user", "postgres")
- .option("password", "postgres")
- .load()
- }
-
- def execute(sql: String, dbName: String, port: Int): Boolean = {
- val url = s"jdbc:postgresql://localhost:$port/$dbName"
- val connection = DriverManager.getConnection(url, "postgres", "postgres")
- val statement = connection.createStatement()
- try {
- statement.execute(sql)
- } catch {
- case ex: SQLException => throw new RuntimeException(ex)
- } finally {
- if (statement != null) statement.close()
- if (connection != null) connection.close()
- }
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresSuitExecutor.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresSuitExecutor.scala
deleted file mode 100644
index 3d716a1..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/end2end/postgres/PostgresSuitExecutor.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.end2end.postgres
-
-import com.github.sharpdata.sharpetl.core.repository.MyBatisSession
-import com.github.sharpdata.sharpetl.core.util.ETLConfig
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.FixedMySQLContainer
-import org.scalatest.{BeforeAndAfterAll, DoNotDiscover, Suites}
-
-@DoNotDiscover
-class PostgresSuitExecutor extends Suites(
- new PostgresModelingSpec
-) with BeforeAndAfterAll {
- val postgresContainer = new FixedPostgresContainer("postgres:12.0-alpine")
- val mysqlContainer = new FixedMySQLContainer("mysql:5.7")
-
- override protected def beforeAll(): Unit = {
- mysqlContainer.configurePort(2333, "sharp_etl")
- mysqlContainer.start()
-
- postgresContainer.configurePort(5432, "postgres")
- postgresContainer.start()
-
- ETLConfig.reInitProperties()
- MyBatisSession.reloadFactory()
- super.beforeAll()
- }
-
- override protected def afterAll(): Unit = {
- mysqlContainer.stop()
- postgresContainer.stop()
- super.afterAll()
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/IOTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/IOTest.scala
deleted file mode 100644
index c4e5a15..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/IOTest.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.job
-
-import com.github.sharpdata.sharpetl.core.api.Variables
-import com.github.sharpdata.sharpetl.core.datasource.config.DataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.model.JobLog
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import org.junit.Assert.assertEquals
-import org.mockito.Mockito.mock
-import org.scalatest.funspec.AnyFunSpec
-
-
-class IOTest extends AnyFunSpec with SparkSessionTestWrapper {
-
- it("should add Derived Columns after reading from source") {
- val processJobStep = new WorkflowStep()
- val dataSourceConfig = new DataSourceConfig()
- dataSourceConfig.derivedColumns = "a:10;b:20"
- dataSourceConfig.dataSourceType = "temp"
- processJobStep.sql =
- """
- select "2021-10-21" as `day`
- """.stripMargin
-
- processJobStep.source = dataSourceConfig
- val df = IO.read(spark, processJobStep, Variables.empty, mock(classOf[JobLog]))
-
- assertEquals(3, df.schema.length)
- assertEquals("day", df.schema(0).name)
- assertEquals("a", df.schema(1).name)
- assertEquals("b", df.schema(2).name)
-
- val row = df.collect()(0)
- assertEquals("2021-10-21", row(0))
- assertEquals("10", row(1))
- assertEquals("20", row(2))
- }
-}
\ No newline at end of file
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/SparkSessionTestWrapper.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/SparkSessionTestWrapper.scala
deleted file mode 100644
index 93641c2..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/SparkSessionTestWrapper.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.job
-
-import com.github.sharpdata.sharpetl.spark.extension.UdfInitializer
-import com.github.sharpdata.sharpetl.spark.utils.ETLSparkSession
-import org.apache.spark.sql.SparkSession
-
-object SparkSessionTestWrapper {
- lazy val spark: SparkSession = {
- ETLSparkSession.local = true
- val session = SparkSession
- .builder()
- .master("local")
- .appName("spark session")
- .config("spark.sql.shuffle.partitions", "1")
- .config("spark.sql.legacy.timeParserPolicy", "LEGACY")
- .getOrCreate()
- UdfInitializer.init(session)
- session
- }
-}
-
-trait SparkSessionTestWrapper {
-
- lazy val spark: SparkSession = SparkSessionTestWrapper.spark
-
-}
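A hedged usage sketch of the wrapper trait above: any spec mixes it in, and every spec shares the single cached local session (the spec name here is hypothetical):

    import org.scalatest.funspec.AnyFunSpec

    // Sketch: the trait exposes `spark`, backed by one lazily created session.
    class WordCountSpec extends AnyFunSpec with SparkSessionTestWrapper {
      it("shares the cached local session") {
        import spark.implicits._
        assert(Seq("a", "b", "b").toDF("word").count() == 3)
      }
    }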
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/config/DataSourceConfigTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/config/DataSourceConfigTest.scala
deleted file mode 100644
index 2698ac5..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/job/config/DataSourceConfigTest.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.job.config
-
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import org.scalatest.funspec.AnyFunSpec
-
-class DataSourceConfigTest extends AnyFunSpec {
-
- describe("DataSourceConfig") {
- it("should have an options property") {
- val config = new DBDataSourceConfig
- assert(config.options.isEmpty)
- }
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckRuleSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckRuleSpec.scala
deleted file mode 100644
index cc9fe00..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckRuleSpec.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.quality
-
-import com.github.sharpdata.sharpetl.spark.job.SparkSessionTestWrapper
-import com.github.sharpdata.sharpetl.spark.test.DatasetComparer
-import com.github.sharpdata.sharpetl.core.quality.{DataQualityConfig, ErrorType, QualityCheckRule}
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-
-class DataQualityCheckRuleSpec extends AnyFlatSpec with should.Matchers with SparkSessionTestWrapper with DatasetComparer {
-
- val BUILT_IN_QUALITY_CHECK_RULES = Seq(
- QualityCheckRule("null check", "powerNullCheck($column)", ErrorType.error),
- QualityCheckRule("custom check for name and address", "powerNullCheck(name) AND powerNullCheck(address)", ErrorType.error)
- )
-
- it should "replace column placeholder with actual name" in {
- BUILT_IN_QUALITY_CHECK_RULES.head.withColumn("name") should be(
- DataQualityConfig("name", "null check", "powerNullCheck(`name`)", ErrorType.error)
- )
- }
-
- it should "make no effects with custom filter" in {
- BUILT_IN_QUALITY_CHECK_RULES.tail.head.withColumn("name") should be(
- DataQualityConfig("name", "custom check for name and address", "powerNullCheck(name) AND powerNullCheck(address)", ErrorType.error)
- )
- }
-}
\ No newline at end of file
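For context, the two cases above pivot on whether a rule contains the $column placeholder. A minimal sketch of that substitution (an assumption about the shape of withColumn, not the actual implementation):

    // Sketch: rules with the placeholder get the concrete, backticked column;
    // fully spelled-out custom filters pass through unchanged.
    def substitute(filter: String, column: String): String =
      if (filter.contains("$column")) filter.replace("$column", s"`$column`")
      else filter

    substitute("powerNullCheck($column)", "name") // powerNullCheck(`name`)
    substitute("powerNullCheck(name) AND powerNullCheck(address)", "name") // unchanged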
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckSpec.scala
deleted file mode 100644
index 9d0b72c..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckSpec.scala
+++ /dev/null
@@ -1,400 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.quality
-
-import com.github.sharpdata.sharpetl.spark.job.{SparkSessionTestWrapper, SparkWorkflowInterpreter}
-import com.github.sharpdata.sharpetl.spark.test.DatasetComparer
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.repository.mysql.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.quality.{DataQualityCheckResult, ErrorType, QualityCheck, QualityCheckRule}
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import com.github.sharpdata.sharpetl.core.util.StringUtil
-import org.apache.spark.sql.{DataFrame, Row}
-import org.apache.spark.sql.types._
-import org.scalatest.BeforeAndAfterEach
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-
-class DataQualityCheckSpec extends AnyFlatSpec with should.Matchers with SparkSessionTestWrapper with DatasetComparer with BeforeAndAfterEach {
-
- val rules = {
- Map(
- ("null check", QualityCheckRule("null check", "powerNullCheck($column)", ErrorType.error)),
- ("111 check", QualityCheckRule("111 check", "$column == \"111\" OR $column IS NULL", ErrorType.warn))
- )
- }
- val qualityCheckAccessor = new QualityCheckAccessor()
-
- val interpreter = new SparkWorkflowInterpreter(spark, rules, qualityCheckAccessor)
-
- val viewName = "test_view_name"
-
- val resultView = s"${viewName}_result"
-
- override protected def afterEach(): Unit = {
- spark.catalog.dropTempView(viewName)
- spark.catalog.dropTempView(resultView)
- super.afterEach()
- }
-
- it should "check simple null data" in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("name", StringType, true)
- )
-
- val data = Seq(
- Row(1, "111"),
- Row(2, "222"),
- Row(3, null),
- Row(4, null)
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id"),
- ("column.name.qualityCheckRules", "null check")
- )
- testDf.createOrReplaceTempView(viewName)
- val result = interpreter.check(viewName, interpreter.parseQualityConfig(step), "id").error
-
- result.size should be(1)
- result.head.ids should be("3,4")
- }
-
- it should "check simple null string" in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("name", StringType, true)
- )
-
- val data = Seq(
- Row(1, "111"),
- Row(2, "222"),
- Row(3, "null"),
- Row(4, null)
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id"),
- ("column.name.qualityCheckRules", "null check")
- )
- testDf.createOrReplaceTempView(viewName)
- val result = interpreter.check(viewName, interpreter.parseQualityConfig(step), "id").error
-
- result.size should be(1)
- result.head.ids should be("3,4")
- }
-
- it should "support UDF `powerNullCheck` " in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("name", StringType, true)
- )
-
- val data = Seq(
- Row(1, "111"),
- Row(2, "222"),
- Row(3, "null"),
- Row(4, null),
- Row(5, "nUlL")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id"),
- ("column.name.qualityCheckRules", "null check")
- )
- testDf.createOrReplaceTempView(viewName)
- val result = interpreter.check(viewName, interpreter.parseQualityConfig(step), "id").error
-
-
- result.size should be(1)
- result.head.ids should be("3,4,5")
- }
-
- it should "run multiple rules in one query" in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("name", StringType, true)
- )
-
- val data = Seq(
- Row(1, "111"),
- Row(2, "222"),
- Row(3, "null"),
- Row(4, null),
- Row(5, "nUlL")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id"),
- ("column.name.qualityCheckRules", "null check, 111 check ")
- )
-
- val interpreter = new SparkWorkflowInterpreter(spark,
- Map(
- ("null check", QualityCheckRule("null check", "powerNullCheck($column)", ErrorType.error)),
- ("111 check", QualityCheckRule("111 check", "$column == \"111\" OR $column IS NULL", ErrorType.error))
- ),
- qualityCheckAccessor
- )
- testDf.createOrReplaceTempView(viewName)
- val result = interpreter.check(viewName, interpreter.parseQualityConfig(step), "id")
-
- val errors = result.error
-
- errors.size should be(2)
- errors.head.ids should be("3,4,5")
- errors.head.dataCheckType should be("null check")
- errors.tail.head.ids should be("1,4")
- errors.tail.head.dataCheckType should be("111 check")
-
- result.warn.size should be(0)
- val passed = passedResult(result.sql, viewName, resultView).collectAsList()
- passed.size() should be(1)
- }
-
- it should "group by column name, check type" in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("name", StringType, true),
- StructField("address", StringType, true)
- )
-
- val data = Seq(
- Row(1, "111", " Null "),
- Row(2, "222", "Beijing"),
- Row(3, "null", "Xian"),
- Row(4, null, " null"),
- Row(5, "nUlL", "ShangHai")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id"),
- ("column.name.qualityCheckRules", "null check, 111 check "),
- ("column.address.qualityCheckRules", "null check")
- )
- testDf.createOrReplaceTempView(viewName)
- val result = interpreter.check(viewName, interpreter.parseQualityConfig(step), "id")
-
- val errors = result.error
- val warns = result.warn
-
- errors should be(
- Seq(
- DataQualityCheckResult("name", "null check", "3,4,5", ErrorType.error, 0, 3),
- DataQualityCheckResult("address", "null check", "1,4", ErrorType.error, 0, 2)
- )
- )
-
- warns should be(
- Seq(
- DataQualityCheckResult("name", "111 check", "1,4", ErrorType.warn, 2, 0)
- )
- )
-
- val passed = passedResult(result.sql, viewName, resultView).collectAsList()
- passed.size() should be(1)
- }
-
- it should "limit works" in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("name", StringType, true),
- StructField("address", StringType, true)
- )
-
- val data = Seq(
- Row(1, "111", " Null "),
- Row(2, "222", "Beijing"),
- Row(3, "null", "Xian"),
- Row(4, null, " null"),
- Row(5, "nUlL", "ShangHai")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id"),
- ("column.name.qualityCheckRules", "null check, 111 check "),
- ("column.address.qualityCheckRules", "null check")
- )
- testDf.createOrReplaceTempView(viewName)
- val result = interpreter.check(viewName, interpreter.parseQualityConfig(step), "id", 2)
-
- val errors = result.error
- val warns = result.warn
-
- errors should be(
- Seq(
- DataQualityCheckResult("name", "null check", "3,4", ErrorType.error, 0, 3),
- DataQualityCheckResult("address", "null check", "1,4", ErrorType.error, 0, 2)
- )
- )
-
- warns should be(
- Seq(
- DataQualityCheckResult("name", "111 check", "1,4", ErrorType.warn, 2, 0)
- )
- )
-
- val passed = passedResult(result.sql, viewName, resultView).collectAsList()
- passed.size() should be(1)
- }
-
- it should "support multiple column ids" in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("phone", StringType, true),
- StructField("name", StringType, true),
- StructField("address", StringType, true)
- )
-
- val data = Seq(
- Row(1, "155 233 2333", "111", " Null "),
- Row(1, "233 233 2333", "222", "null "),
- Row(3, "155 233 2333", "null", "Xian"),
- Row(4, "155 233 2333", null, " null"),
- Row(5, "155 233 2333", "nUlL", "ShangHai"),
- Row(6, "155 233 2333", "zhang san", "ShangHai")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id"),
- ("column.name.qualityCheckRules", "null check, 111 check "),
- ("column.address.qualityCheckRules", "null check")
- )
- testDf.createOrReplaceTempView(viewName)
- val result = interpreter.check(viewName, interpreter.parseQualityConfig(step), "id, phone", 2)
-
- val errors = result.error
- val warns = result.warn
-
- errors should be(
- Seq(
- DataQualityCheckResult("name", "null check", "3__155 233 2333,4__155 233 2333", ErrorType.error, 0, 3),
- DataQualityCheckResult("address", "null check", "1__155 233 2333,1__233 233 2333", ErrorType.error, 0, 3)
- )
- )
-
- warns should be(
- Seq(
- DataQualityCheckResult("name", "111 check", "1__155 233 2333,4__155 233 2333", ErrorType.warn, 2, 0)
- )
- )
-
- val passed = passedResult(result.sql, viewName, resultView).collectAsList()
- passed.size() should be(1)
- }
-
- it should "support multiple column ids drop duplicated data" in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("phone", StringType, true),
- StructField("name", StringType, true),
- StructField("address", StringType, true)
- )
-
- val data = Seq(
- Row(1, "155 233 2333", "111", " Null "),
- Row(1, "155 233 2333", "222", "null "),
- Row(3, "155 233 2333", "lisi", "Xian"),
- Row(4, "155 233 2333", null, " null"),
- Row(5, "155 233 2333", "wangwu", "ShangHai"),
- Row(5, "155 233 2333", "zhang san", "ShangHai")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id, phone"),
- ("column.name.qualityCheckRules", "null check")
- )
- testDf.createOrReplaceTempView(viewName)
-
- val interpreter = new SparkWorkflowInterpreterStub(spark, rules)
-
- val result = interpreter.qualityCheck(step, "1", "???", testDf)
-
-
- val errors = result.error
-
- errors should be(
- Seq(
- DataQualityCheckResult("name", "null check", "4__155 233 2333", ErrorType.error, 0, 1),
- DataQualityCheckResult("id, phone", "Duplicated PK check", "1__155 233 2333,5__155 233 2333", ErrorType.error, 0, 2)
- )
- )
-
- //result.passed.show(truncate = false)
-
- result.passed.collectAsList().size() should be(3)
- }
-
- def passedResult(sql: String, tempViewName: String, resultView: String): DataFrame = {
- val df = spark.sql(QualityCheck.generateAntiJoinSql(sql, StringUtil.EMPTY, tempViewName))
- spark.catalog.dropTempView(tempViewName)
- spark.catalog.dropTempView(resultView)
- df
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckUDRSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckUDRSpec.scala
deleted file mode 100644
index 4e863d2..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/DataQualityCheckUDRSpec.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.quality
-
-import com.github.sharpdata.sharpetl.spark.job.{SparkSessionTestWrapper, SparkWorkflowInterpreter}
-import com.github.sharpdata.sharpetl.spark.test.DatasetComparer
-import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
-import com.github.sharpdata.sharpetl.core.quality.{DataQualityCheckResult, ErrorType, QualityCheckRule}
-import com.github.sharpdata.sharpetl.core.repository.mysql.QualityCheckAccessor
-import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
-import org.apache.spark.sql.{Row, SparkSession}
-import org.apache.spark.sql.types._
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should
-
-
-class DataQualityCheckUDRSpec extends AnyFlatSpec with should.Matchers with SparkSessionTestWrapper with DatasetComparer {
-
- val rules = {
- Map(
- ("null check", QualityCheckRule("null check", "powerNullCheck($column)", ErrorType.error)),
- ("111 check", QualityCheckRule("111 check", "$column == \"111\" OR $column IS NULL", ErrorType.warn)),
- ("duplicated check", QualityCheckRule("duplicated check", "UDR.com.github.sharpdata.sharpetl.core.quality.udr.DuplicatedCheck", ErrorType.error)),
- ("233 agg check", QualityCheckRule("233 agg check", "UDR.com.github.sharpdata.sharpetl.spark.quality.udr.AggCheck", ErrorType.warn))
- )
- }
- val viewName = "test_view_name_udr"
-
- val interpreter = new SparkWorkflowInterpreterStub(spark, rules)
-
- it should "check duplicated value" in {
-
- val schema = List(
- StructField("id", IntegerType, true),
- StructField("phone", StringType, true),
- StructField("account", DoubleType, true),
- StructField("address", StringType, true)
- )
-
- val data = Seq(
- Row(1, "155 233 2333", 233.33, "beijing dongzhimen xxx number 23"),
- Row(1, "233 233 2334", 666.666, "beijing dongzhimen xxx number 23"),
- Row(3, null, 0.1, "xi'an tiangubalu xxx number 6"),
- Row(4, "155 233 2333", 123.456, "beijing dongzhimen xxx number 23")
- )
-
- val testDf = spark.createDataFrame(
- spark.sparkContext.parallelize(data),
- StructType(schema)
- )
-
- val step = new WorkflowStep()
- step.source = new DBDataSourceConfig()
- step.source.options = Map(
- ("idColumn", "id, phone"),
- ("topN", "2"),
- ("column.phone.qualityCheckRules", "duplicated check, null check"),
- ("column.address.qualityCheckRules", "duplicated check"),
- ("column.account.qualityCheckRules", "233 agg check")
- )
-
- //testDf.createOrReplaceTempView(viewName)
- //val sqlResult = interpreter.check(testDf, viewName, interpreter.parseQualityConfig(step), "id, phone", 2)
- //val udrResult = interpreter.checkUDR(testDf, viewName, interpreter.parseQualityConfig(step), "id, phone", 2)
- //val result = sqlResult union udrResult
-
- val result = interpreter.qualityCheck(step, "1", "???", testDf)
-
- val errors = result.error
- val warns = result.warn
-
-
- // errors should be(
- // Seq(
- // DataQualityCheckResult("phone", "null check", "3__NULL", ErrorType.error, 0, 1),
- // DataQualityCheckResult("phone", "duplicated check", "1__155 233 2333,4__155 233 2333", ErrorType.error, 0, 2),
- // DataQualityCheckResult("address", "duplicated check", "1__155 233 2333,1__233 233 2334", ErrorType.error, 0, 3)
- // )
- // )
-
- errors.head should be(DataQualityCheckResult("phone", "null check", "3__NULL", ErrorType.error, 0, 1))
-
- errors(1).dataCheckType should be("duplicated check")
- errors(1).errorCount should be(2)
- errors(1).warnCount should be(0)
-
- errors(2).dataCheckType should be("duplicated check")
- errors(2).errorCount should be(3)
- errors(2).warnCount should be(0)
-
- Seq("1__155 233 2333", "4__155 233 2333") should contain theSameElementsAs errors(1).ids.split(",")
- errors(2).ids.split(",").toSet.subsetOf(
- Seq("1__155 233 2333", "1__233 233 2334", "4__155 233 2333").toSet
- ) should be(true)
-
- warns should be(
- Seq(
- DataQualityCheckResult("account", "233 agg check", "1__155 233 2333,1__233 233 2334", ErrorType.warn, 2, 0)
- )
- )
-
- val passed = result.passed.collectAsList()
- passed.isEmpty should be(true)
- }
-}
-
-final class SparkWorkflowInterpreterStub(override val spark: SparkSession,
- override val dataQualityCheckRules: Map[String, QualityCheckRule])
- extends SparkWorkflowInterpreter(spark, dataQualityCheckRules, new QualityCheckAccessor()) {
- override def recordCheckResult(jobId: String, jobScheduleId: String, results: Seq[DataQualityCheckResult]): Unit = ()
-}
-
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/udr/AggCheck.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/udr/AggCheck.scala
deleted file mode 100644
index a167e1c..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/quality/udr/AggCheck.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.quality.udr
-
-import com.github.sharpdata.sharpetl.core.quality.QualityCheck.joinIdColumns
-import com.github.sharpdata.sharpetl.core.quality.{DataQualityConfig, UserDefinedRule}
-
-object AggCheck extends UserDefinedRule {
- override def check(tempViewName: String, idColumn: String, udr: DataQualityConfig): (String, String) = {
- val resultViewName = s"${tempViewName}__${udr.dataCheckType.replace(' ', '_')}__${udr.column}"
- val sql =
- s"""|CREATE TEMPORARY VIEW $resultViewName
- | (ID COMMENT 'duplicated id')
- | AS SELECT ${joinIdColumns(idColumn, prefix = "a")} AS id
- | FROM `$tempViewName` a
- | INNER JOIN (SELECT `$tempViewName`.`${udr.column}`
- | FROM `$tempViewName`
- | WHERE `$tempViewName`.`${udr.column}` > 233) b
- | ON a.`${udr.column}` = b.`${udr.column}`
- |""".stripMargin
- (sql, resultViewName)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/BatchKafkaMergeDFTest.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/BatchKafkaMergeDFTest.scala
deleted file mode 100644
index fc955c5..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/BatchKafkaMergeDFTest.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.test
-
-import com.google.common.base.Strings.isNullOrEmpty
-import com.github.sharpdata.sharpetl.core.datasource.config.BatchKafkaDataSourceConfig
-import org.apache.spark.sql.functions.{col, from_json}
-import org.apache.spark.sql.types.StructType
-import org.apache.spark.sql.{DataFrame, SparkSession}
-import org.scalatest.funspec.AnyFunSpec
-
-
-class BatchKafkaMergeDFTest extends AnyFunSpec {
-
- lazy val spark: SparkSession = SparkSession
- .builder()
- .appName(this.getClass.getSimpleName).master("local")
- .getOrCreate()
-
- private val kafkaDataSourceConfig = new BatchKafkaDataSourceConfig()
- private val schemaDDL = "a String,b String,c String,d String,e String"
- private val sourceSchema: StructType = StructType.fromDDL(schemaDDL)
- private val schemaMappingExpr: Seq[String] = schemaMapping(sourceSchema)
-
- private val testDF: DataFrame = spark.createDataFrame(Seq(
- ("null", """{"a":"a1","b":"b1","c":"c1","d":"d1","e":"e1"}""", "bigDataAllRating.o", "2022-01-20 11:21:45"),
- ("null", """{"a":"a2","b":"b2","c":"c2","d":"d2","e":"e2"}""", "bigDataAllRating.o", "2022-01-21 11:21:45"),
- ("null", """{"a":"a3","b":"b3","c":"c3","d":"d3","e":"e3"}""", "bigDataAllRating.o", "2022-01-22 11:21:45")))
- .toDF("key", "value", "topic", "timestamp")
-
- private def schemaMapping: StructType => Seq[String] = {
- sourceSchema =>
- sourceSchema
- .fieldNames
- .map(fieldName => s"""data.$fieldName as $fieldName""")
- }
-
- private def getDF(columns:String): DataFrame = {
- import spark.implicits._
-
- val messageColumnNames = columns match {
- case value: String if !isNullOrEmpty(value) => kafkaDataSourceConfig.topicMessageColumns.split(",").map(_.trim)
- case _ => Array.empty[String]
- }
-
- val exprColumns = "CAST(value as STRING)" +: messageColumnNames.map(it => s"CAST($it as STRING)")
- val selectColumns = (from_json($"value", sourceSchema) as "data") +: messageColumnNames.map(col)
- val allSchemaMappingExpr = schemaMappingExpr ++ messageColumnNames.toSeq
- testDF.selectExpr(exprColumns: _*).select(selectColumns: _*).selectExpr(allSchemaMappingExpr: _*)
- }
-
- it("should get other columns") {
- kafkaDataSourceConfig.topicMessageColumns = "timestamp"
- val dataFrame = getDF(kafkaDataSourceConfig.topicMessageColumns)
- assert(dataFrame.columns.mkString(",") == "a,b,c,d,e,timestamp")
- }
-
- it("should not get other columns") {
- kafkaDataSourceConfig.topicMessageColumns = null
- val dataFrame = getDF(kafkaDataSourceConfig.topicMessageColumns)
- assert(dataFrame.columns.mkString(",") == "a,b,c,d,e")
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/DataFrameComparer.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/DataFrameComparer.scala
deleted file mode 100644
index 4c3e41b..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/DataFrameComparer.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.test
-
-import org.apache.spark.sql.DataFrame
-
-trait DataFrameComparer extends DatasetComparer {
-
- /**
- * Raises an error unless `actualDF` and `expectedDF` are equal
- */
- def assertSmallDataFrameEquality(actualDF: DataFrame,
- expectedDF: DataFrame,
- ignoreNullable: Boolean = false,
- ignoreColumnNames: Boolean = false,
- orderedComparison: Boolean = true,
- truncate: Int = 500): Unit = {
- assertSmallDatasetEquality(
- actualDF,
- expectedDF,
- ignoreNullable,
- ignoreColumnNames,
- orderedComparison,
- truncate
- )
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/DatasetComparer.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/DatasetComparer.scala
deleted file mode 100644
index 895fbad..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/DatasetComparer.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.test
-
-import org.apache.spark.sql.Dataset
-import org.apache.spark.sql.functions._
-
-case class DatasetSchemaMismatch(smth: String) extends Exception(smth)
-
-case class DatasetContentMismatch(smth: String) extends Exception(smth)
-
-case class DatasetCountMismatch(smth: String) extends Exception(smth)
-
-object DatasetComparerLike {
-
- def naiveEquality[T](o1: T, o2: T): Boolean = {
- o1.equals(o2)
- }
-
-}
-
-trait DatasetComparer {
-
- private def betterSchemaMismatchMessage[T](actualDS: Dataset[T], expectedDS: Dataset[T]): String = {
- "\nActual Schema Field | Expected Schema Field\n" + actualDS.schema
- .zipAll(
- expectedDS.schema,
- "",
- ""
- )
- .map {
- case (sf1, sf2) if sf1 == sf2 =>
- (s"$sf1 | $sf2")
- case ("", sf2) =>
- (s"MISSING | $sf2")
- case (sf1, "") =>
- (s"$sf1 | MISSING")
- case (sf1, sf2) =>
- (s"$sf1 | $sf2")
- }
- .mkString("\n")
- }
-
- private def betterContentMismatchMessage[T](a: Array[T], e: Array[T], truncate: Int): String = {
- s"""
- |Actual Content: ${a.take(truncate).mkString("Array(", ", ", ")")}
- |Expected Content: ${e.take(truncate).mkString("Array(", ", ", ")")}
- |""".stripMargin
- }
-
- /**
- * Raises an error unless `actualDS` and `expectedDS` are equal
- */
- def assertSmallDatasetEquality[T](actualDS: Dataset[T],
- expectedDS: Dataset[T],
- ignoreNullable: Boolean = false,
- ignoreColumnNames: Boolean = false,
- orderedComparison: Boolean = true,
- truncate: Int = 500): Unit = {
- if (!SchemaComparer.equals(actualDS.schema, expectedDS.schema, ignoreNullable, ignoreColumnNames)) {
- throw DatasetSchemaMismatch(
- betterSchemaMismatchMessage(actualDS, expectedDS)
- )
- }
- if (orderedComparison) {
- val a = actualDS.collect()
- val e = expectedDS.collect()
- if (!a.sameElements(e)) {
- throw DatasetContentMismatch(betterContentMismatchMessage(a, e, truncate))
- }
- } else {
- val a = defaultSortDataset(actualDS).collect()
- val e = defaultSortDataset(expectedDS).collect()
- if (!a.sameElements(e)) {
- throw DatasetContentMismatch(betterContentMismatchMessage(a, e, truncate))
- }
- }
- }
-
- def defaultSortDataset[T](ds: Dataset[T]): Dataset[T] = {
- val colNames = ds.columns
- val cols = colNames.map(col)
- ds.sort(cols: _*)
- }
-
- def sortPreciseColumns[T](ds: Dataset[T]): Dataset[T] = {
- val colNames = ds.dtypes
- .withFilter { dtype =>
- !(Seq("DoubleType", "DecimalType", "FloatType").contains(dtype._2))
- }
- .map(_._1)
- val cols = colNames.map(col)
- ds.sort(cols: _*)
- }
-}
\ No newline at end of file
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/SchemaComparer.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/SchemaComparer.scala
deleted file mode 100644
index e8a170d..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/test/SchemaComparer.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.test
-
-import org.apache.spark.sql.types.{StructField, StructType}
-
-object SchemaComparer {
-
- def equals(s1: StructType, s2: StructType, ignoreNullable: Boolean = false, ignoreColumnNames: Boolean = false): Boolean = {
- if (s1.length != s2.length) {
- false
- } else {
- val structFields: Seq[(StructField, StructField)] = s1.zip(s2)
- structFields.forall { t =>
- ((t._1.nullable == t._2.nullable) || ignoreNullable) &&
- ((t._1.name == t._2.name) || ignoreColumnNames) &&
- (t._1.dataType == t._2.dataType)
- }
- }
- }
-
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/DailyJobsSummaryReportTransformSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/DailyJobsSummaryReportTransformSpec.scala
deleted file mode 100644
index 5c0b5d4..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/DailyJobsSummaryReportTransformSpec.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import com.github.sharpdata.sharpetl.core.notification.sender.NotificationFactory
-import com.github.sharpdata.sharpetl.core.notification.sender.email.Email
-import com.github.sharpdata.sharpetl.core.repository.JobLogAccessor.jobLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.StepLogAccessor.stepLogAccessor
-import com.github.sharpdata.sharpetl.core.repository.model.{JobLog, JobStatus, StepLog}
-import com.github.sharpdata.sharpetl.core.util.Constants.DataSourceType
-import com.github.sharpdata.sharpetl.core.util.DateUtil.L_YYYY_MM_DD_HH_MM_SS
-import com.github.sharpdata.sharpetl.core.util.FlywayUtil
-import ETLSuit.runJob
-import com.github.sharpdata.sharpetl.core.util.StringUtil.uuid
-import org.mockito.ArgumentCaptor
-import org.mockito.Mockito.{times, verify}
-import org.mockito.MockitoSugar.withObjectMocked
-import org.scalatest.DoNotDiscover
-
-import java.time.LocalDateTime
-import java.util.TimeZone
-
-@DoNotDiscover
-class DailyJobsSummaryReportTransformSpec extends ETLSuit {
- override val createTableSql: String = ""
-
- it("should send summary report correct") {
- val timeZone = System.getProperty("user.timezone")
- TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"))
- val startTime = LocalDateTime.parse("2022-01-01 10:00:00", L_YYYY_MM_DD_HH_MM_SS)
- val startTimeText = startTime.format(L_YYYY_MM_DD_HH_MM_SS)
-
- FlywayUtil.migrate()
- val job1Id = prepareData("job1", startTime.plusHours(10))
- val job2Id = prepareData("job2", startTime.plusHours(10))
-
- withObjectMocked[NotificationFactory.type] {
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=daily_jobs_summary_report_test", "--period=1440",
- "--local", s"--default-start-time=$startTimeText", "--env=test", "--once")
-
- runJob(jobParameters)
- val argument = ArgumentCaptor.forClass(classOf[Email])
- verify(NotificationFactory, times(1)).sendNotification(argument.capture())
- val email = argument.getValue.asInstanceOf[Email]
- val emptyStr = ""
-
- assert(email.attachment.get.content ==
- s"""projectName,workflowName,jobId,dataRangeStart,dataRangeEnd,jobStartTime,jobStatus,duration(seconds),dataFlow,to-hive,to-postgres,failStep,errorMessage
- |projectName,job1,${job1Id},2022-02-09 00:00:00,2022-02-10 00:00:00,2022-01-01 20:00:00,SUCCESS,20,hive(10) -> postgres(10),10,10,,""
- |projectName,job2,${job2Id},2022-02-09 00:00:00,2022-02-10 00:00:00,2022-01-01 20:00:00,SUCCESS,20,hive(10) -> postgres(10),10,10,,""$emptyStr""".stripMargin
- )
- }
- TimeZone.setDefault(TimeZone.getTimeZone(timeZone))
- }
-
- private def prepareData(jobName: String, jobStartTime: LocalDateTime): String = {
- val jobLog = mockJobLog(jobName, jobStartTime)
- jobLogAccessor.create(jobLog) // create time will converted to now, so let's do update
- jobLog.jobStartTime = jobStartTime
- jobLogAccessor.update(jobLog)
-
- stepLogAccessor.create(
- mockStepLog(jobLog.jobId, "1", DataSourceType.HIVE, 10, jobStartTime)
- )
- stepLogAccessor.create(
- mockStepLog(jobLog.jobId, "2", DataSourceType.POSTGRES, 5, jobStartTime)
- )
-
- jobLog.jobId
- }
-
- private def mockJobLog(wfName: String, jobStartTime: LocalDateTime): JobLog = {
- new JobLog(
- jobId = uuid,
- workflowName = wfName,
- period = 1440,
- jobName = "111",
- dataRangeStart = "20220209000000",
- dataRangeEnd = "20220210000000",
- jobStartTime = jobStartTime,
- jobEndTime = jobStartTime,
- status = JobStatus.SUCCESS,
- createTime = LocalDateTime.now(),
- lastUpdateTime = LocalDateTime.now(),
- logDrivenType = "",
- file = "",
- projectName = "projectName",
- applicationId = "applicationId",
- loadType = "",
- runtimeArgs = ""
- )
- }
-
- private def mockStepLog(jobId: String, stepId: String, targetType: String, targetCount: Int, startTime: LocalDateTime): StepLog = {
- new StepLog(
- jobId = jobId,
- stepId = stepId,
- status = JobStatus.SUCCESS,
- startTime = startTime,
- endTime = startTime,
- duration = 10,
- output = "",
- error = "",
- sourceCount = 10,
- targetCount = targetCount,
- successCount = 10,
- failureCount = 0,
- sourceType = "HIVE",
- targetType = targetType)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/DynamicLoadingTransformerSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/DynamicLoadingTransformerSpec.scala
deleted file mode 100644
index 92dafcc..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/DynamicLoadingTransformerSpec.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import ETLSuit.runJob
-import org.scalatest.DoNotDiscover
-
-@DoNotDiscover
-class DynamicLoadingTransformerSpec extends ETLSuit {
- it("it should execute spark sql in dynamic loading transformer") {
- val jobParameters: Array[String] = Array("single-job",
- "--name=dynamic_transformer", "--period=1440",
- "--local", s"--default-start-time=2021-11-28 15:30:30", "--env=test", "--once")
-
- runJob(jobParameters)
- val result = spark.sql("select * from `dynamic_tmp_transformer_result_table`")
- result.show()
- result.count() should be(2)
- }
-
- override val createTableSql: String = ""
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcTransformerSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcTransformerSpec.scala
deleted file mode 100644
index 8b4e0d0..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/JdbcTransformerSpec.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import com.github.sharpdata.sharpetl.core.util.DateUtil
-import ETLSuit.runJob
-import com.github.sharpdata.sharpetl.spark.end2end.mysql.MysqlSuit
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.types.{LongType, StructField, StructType}
-import org.scalatest.DoNotDiscover
-
-import java.time.LocalDateTime
-
-@DoNotDiscover
-class JdbcTransformerSpec extends MysqlSuit {
-
- val schema = List(
- StructField("number", LongType)
- )
-
- val expDf = spark.createDataFrame(
- spark.sparkContext.parallelize(Seq(Row(12.toLong)))
- , StructType(schema))
-
- override val createTableSql: String =
- """
- |create procedure my_test() begin
- |select 12 as 'number';
- |end
- |""".stripMargin
-
- it("should call sp and return result as dataframe") {
- execute(createTableSql)
- val df = JdbcResultSetTransformer.transform(
- Map(
- "dbName" -> "int_test",
- "dbType" -> "mysql",
- "sql" -> "call my_test()"
- ))
- assertSmallDataFrameEquality(df, expDf, orderedComparison = false)
- }
-
- it("should call sp with no return success") {
- execute("create procedure empty_procedure() begin end")
- val df = JdbcResultSetTransformer.transform(
- Map(
- "dbName" -> "int_test",
- "dbType" -> "mysql",
- "sql" -> "call empty_procedure()"
- ))
- }
-
- it("should read from sp and write to target") {
- execute("create table sp_test(number bigint)")
- val startTime = LocalDateTime.now().minusDays(1L).format(DateUtil.L_YYYY_MM_DD_HH_MM_SS)
-
- val jobParameters: Array[String] = Array("single-job",
- "--name=sp_test", "--period=1440",
- "--local", s"--default-start-time=${startTime}", "--env=test")
- runJob(jobParameters)
-
- val targetDf = readFromSource("sp_test")
- assertSmallDataFrameEquality(targetDf, expDf, orderedComparison = false)
- }
-}
diff --git a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/ReplaceTemplateTableNameSpec.scala b/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/ReplaceTemplateTableNameSpec.scala
deleted file mode 100644
index dbc377b..0000000
--- a/spark/src/test/scala/com/github/sharpdata/sharpetl/spark/transformation/ReplaceTemplateTableNameSpec.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.github.sharpdata.sharpetl.spark.transformation
-
-import com.github.sharpdata.sharpetl.spark.end2end.ETLSuit
-import ETLSuit.runJob
-import org.scalatest.DoNotDiscover
-
-
-@DoNotDiscover
-class ReplaceTemplateTableNameSpec extends ETLSuit {
-
- it("should replace variable") {
- val dataFrame = spark.createDataFrame(Seq((1, "name1"), (2, "name2"))).toDF("id", "name")
- dataFrame.createTempView("temp_source")
- val jobParameters: Array[String] = Array("single-job",
- "--name=replace_template_tablename", "--period=1440",
- "--local", s"--default-start-time=2021-11-28 15:30:30", "--env=test", "--once")
-
- runJob(jobParameters)
- val list = spark.sql("select * from temp_end").collectAsList()
- val expected = "[[1,name1], [2,name2]]"
- assert(expected == list.toString)
- }
- override val createTableSql: String = ""
-}
diff --git a/website/.eslintrc.js b/website/.eslintrc.js
new file mode 100644
index 0000000..8ccf3b1
--- /dev/null
+++ b/website/.eslintrc.js
@@ -0,0 +1,66 @@
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ *
+ * @format
+ */
+
+const OFF = 0;
+const WARNING = 1;
+const ERROR = 2;
+
+module.exports = {
+ root: true,
+ env: {
+ browser: true,
+ commonjs: true,
+ jest: true,
+ node: true,
+ },
+ parser: '@babel/eslint-parser',
+ parserOptions: {
+ allowImportExportEverywhere: true,
+ },
+ extends: ['airbnb', 'prettier'],
+ plugins: ['react-hooks', 'header'],
+ rules: {
+ // Ignore certain webpack alias because it can't be resolved
+ 'import/no-unresolved': [
+ ERROR,
+ {ignore: ['^@theme', '^@docusaurus', '^@generated']},
+ ],
+ 'import/extensions': OFF,
+ 'header/header': [
+ ERROR,
+ 'block',
+
+ [
+ '*',
+ ' * Copyright (c) Meta Platforms, Inc. and affiliates.',
+ ' *',
+ ' * This source code is licensed under the MIT license found in the',
+ ' * LICENSE file in the root directory of this source tree.',
+ ' *',
+ // Unfortunately eslint-plugin-header doesn't support optional lines.
+ // If you want to enforce your website JS files to have @flow or @format,
+ // modify these lines accordingly.
+ {
+ pattern: '.* @format',
+ },
+ ' ',
+ ],
+ ],
+ 'react/jsx-filename-extension': OFF,
+ 'react-hooks/rules-of-hooks': ERROR,
+ 'react/prop-types': OFF, // PropTypes aren't used much these days.
+ 'react/function-component-definition': [
+ WARNING,
+ {
+ namedComponents: 'function-declaration',
+ unnamedComponents: 'arrow-function',
+ },
+ ],
+ },
+};
diff --git a/website/.gitignore b/website/.gitignore
new file mode 100644
index 0000000..a2f1b00
--- /dev/null
+++ b/website/.gitignore
@@ -0,0 +1,23 @@
+# Dependencies
+/node_modules
+
+# Production
+/build
+
+# Generated files
+.docusaurus
+.cache-loader
+
+# Misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# ESLint
+.eslintcache
diff --git a/website/.prettierignore b/website/.prettierignore
new file mode 100644
index 0000000..9b74406
--- /dev/null
+++ b/website/.prettierignore
@@ -0,0 +1,3 @@
+node_modules
+build
+.docusaurus
diff --git a/website/.prettierrc b/website/.prettierrc
new file mode 100644
index 0000000..34cf084
--- /dev/null
+++ b/website/.prettierrc
@@ -0,0 +1,9 @@
+{
+ "arrowParens": "always",
+ "bracketSameLine": true,
+ "bracketSpacing": false,
+ "printWidth": 80,
+ "proseWrap": "never",
+ "singleQuote": true,
+ "trailingComma": "all"
+}
diff --git a/website/.stylelintrc.js b/website/.stylelintrc.js
new file mode 100644
index 0000000..11b6ffb
--- /dev/null
+++ b/website/.stylelintrc.js
@@ -0,0 +1,13 @@
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+module.exports = {
+ plugins: ['stylelint-copyright'],
+ rules: {
+ 'docusaurus/copyright-header': true,
+ },
+};
diff --git a/website/babel.config.js b/website/babel.config.js
new file mode 100644
index 0000000..84ad45a
--- /dev/null
+++ b/website/babel.config.js
@@ -0,0 +1,12 @@
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ *
+ * @format
+ */
+
+module.exports = {
+ presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
+};
diff --git a/website/blog/sharp-etl-introduce-01-why-we-need-sharp-etl.md b/website/blog/sharp-etl-introduce-01-why-we-need-sharp-etl.md
new file mode 100644
index 0000000..68f6b74
--- /dev/null
+++ b/website/blog/sharp-etl-introduce-01-why-we-need-sharp-etl.md
@@ -0,0 +1,246 @@
+---
+slug: sharp-etl-introduce-01-why-we-need-sharp-etl
+title: "Introducing Sharp ETL (Part 1): Why We Need Sharp ETL"
+tags: [sharp etl]
+date: 2022-08-01T00:00:00+08:00
+---
+
+## Introduction
+
+Drawing on current data engineering practice, this post lays out the principles behind ETL in data engineering and gives a brief introduction to Sharp ETL.
+
+ETL (or ELT) is one of the most common ways to process data. As data platforms move past their wild-growth era into fine-grained governance, ad-hoc code and unorganized SQL scripts no longer meet the bar. We will walk through the difficulties existing ETL approaches face today to motivate the distinctive yet indispensable features of Sharp ETL.
+
+
+
+## What is wrong with existing ETL?
+
+There are several common ways to build ETL, each with its own strengths and weaknesses. Let's examine them along a few dimensions:
+
+1. **Hand-written code**
+
+Writing ETL as code is very common and applies to almost every scenario.
+
+pros:
+
+* Code-based ETL is easy to extend arbitrarily, add custom logic to, and reuse logic across.
+
+cons:
+
+* Code is still code: most of it is imperative logic, which is not easy to maintain or understand.
+* It lacks metadata support; you cannot tell which tables a job touches. When a table schema changes, jobs may fail unexpectedly.
+* Code-based ETL makes the development workflow heavyweight: any change to the logic requires redeploying a jar. For security reasons, multi-tenant platforms inside some enterprises require a review before a new jar can be uploaded, which slows the whole process down considerably at the review stage. Even without a review, the jar still has to be deployed to the environment manually, and the new logic may break other jobs that were running fine.
+
+2. **Drag-and-drop**
+
+Drag-and-drop tooling is ubiquitous on emerging cloud platforms and in-house enterprise platforms; it is practically standard equipment.
+
+pros:
+
+* The DAG form is easy to understand and the barrier to entry is low: fill in a few parameters and you get data ingestion, deduplication, and similar logic with little effort.
+* Adding and changing logic is lightweight. Most platforms even implement version management, so jobs can be managed like code, with git-like operations such as rolling back versions and resolving conflicts.
+* Some platforms let you compose small components into larger ones to reuse ETL logic.
+
+cons:
+
+* Custom logic is difficult; on more closed platforms even adding a new data source is a problem.
+* UI operations are hard to automate. When some operation has to be applied to many jobs at once, a graphical interface gets in the way. Sometimes all the ETL jobs for one business system are very similar but numerous, and dragging them together by hand is tedious and error-prone.
+
+3. **Plain SQL scripts**
+
+pros:
+
+* Properly decomposed SQL is easy to understand (single statements need a length limit); the declarative style is expressive, virtually everyone knows SQL, and the barrier to entry is low.
+
+cons:
+
+* Logic is hard to reuse; you frequently end up copying another chunk of SQL.
+* Lacking systematic organization, plain SQL scripts tend to sprawl.
+* Extensibility is limited. Although most SQL dialects support UDFs (User Defined Functions), they still cannot match code (for example, calling an HTTP API).
+
+### Missing data engineering practices
+
+We believe ETL development should include these engineering practices:
+
+* Check data consistency and completeness by joining fact tables with dimension tables
+* Use sentinel values to record unknown values (failed joins) and not-applicable values (unreasonable values) produced while joining tables
+* Record data quality issues in logs, graded and classified according to business-defined quality check rules, to ensure data completeness and accuracy
+* Record scheduling logs to decouple from any specific scheduler, making scheduling more robust while capturing structured information about job runs for troubleshooting
+* Proactively detect upstream changes (mainly schema changes) in time and adjust accordingly
+* Explicitly declare incremental vs. full loads and slowly changing vs. static dimensions as job metadata
+* Check job dependencies and run a job only when its dependencies are satisfied
+
+Whether ETL is hand-written, drag-and-drop, or plain SQL, the amount of manual work makes these practices hard to apply uniformly, and the following problems are common:
+
+* Joins are performed without recording consistency or completeness issues; sometimes even the most basic joins are missing.
+* Under time pressure or implementation complexity, data quality checks are skipped, so data entering the warehouse lacks even basic quality guarantees.
+* Data operations are hard to automate; after a job fails, manual intervention is usually needed before it recovers.
+* Scheduling depends too heavily on the existing scheduler and lacks fail-safe design, so duplicate scheduling is hard to prevent (if the scheduler misbehaves and a job gets scheduled twice, the result is duplicate computation).
+* Troubleshooting depends too heavily on execution logs and devolves into endless guessing, adding logs, and retrying. With plain SQL scripts, you often face a failed job of several thousand lines of SQL with no idea where to start debugging.
+
+These engineering practices are easily overlooked, which leads to data problems later. We therefore need a framework that packages the practices above for us, guaranteeing high-quality, low-maintenance ETL.
+
+## What should the ideal ETL look like?
+
+Nothing has been hotter in recent years than stream processing and streaming ETL. Although this article is not specifically about streaming or batch, it is worth pointing out that the basic data engineering practices are required either way; they are an indispensable part of real-world practice. The discussion below sets streaming vs. batch aside.
+
+The ideal ETL should combine the strengths of the approaches above while discarding their weaknesses. Let's sketch it out:
+
+* The semantic expressiveness of SQL
+* Extensibility through custom code
+* Code-first, with version management
+* Visual tooling built on top of the code-first foundation
+* The ability to break overly long SQL apart to reduce cognitive load
+* Built-in, uniform, standard data engineering practices
+  * Join checks between fact and dimension tables
+  * Recording unknown and not-applicable values produced during joins
+  * Graded and classified recording of data quality issues
+  * Decoupling from the scheduling system via scheduling logs
+  * ... ...
+
+
+### A Sharp ETL workflow at a glance
+
+Below is an example Sharp ETL workflow; I will explain each of its steps in turn.
+
+A workflow opens with a header describing the workflow's own attributes: its name, incremental vs. full load, the log-driven mode, notification settings, and so on. A series of steps follows, each with its own input and output.
+
+The first step reads data out of Hive and writes it into a temp table for later use. This SQL also uses Sharp ETL's built-in variables: `${YEAR}`, `${MONTH}`, and `${DAY}` are replaced with real values at runtime, computed from our built-in scheduling log. Note the data quality checks configured in the source options: simply referencing the relevant quality check rules applies them to the corresponding columns, with issues graded and classified according to the level each rule defines.
+
+The second step joins the fact table with the dimension table, recording unmatched rows with `-1`.
+
+The third step preprocesses the temp data.
+
+The fourth step handles the peculiarities of updating data in Hive: updating rows in Hive means rewriting the entire partitions they belong to, so this step computes which partitions the current batch touches, so that their data can later be read back for the update.
+
+The fifth step uses the filter condition assembled in the previous step to read back all the partitions affected by this update.
+
+The sixth step calls a piece of code through the Transformer facility to build a slowly changing dimension table. That code also works by assembling SQL; it is wrapped in a transformer only because the assembly is fairly involved. A transformer can in fact run arbitrary code. The transformer has output of its own, which we write back over the corresponding partitions to finish the job.
+
+
+```sql
+-- workflow=fact_device
+-- loadType=incremental
+-- logDrivenType=timewindow
+
+-- step=1
+-- source=hive
+-- dbName=ods
+-- tableName=t_device
+-- options
+-- idColumn=order_id
+-- column.device_id.qualityCheckRules=power null check
+-- column.status.qualityCheckRules=empty check
+-- target=temp
+-- tableName=t_device__extracted
+-- writeMode=overwrite
+select
+ `device_id` as `device_id`,
+ `manufacturer` as `manufacturer`,
+ `status` as `status`,
+ `online` as `online`,
+ `create_time` as `create_time`,
+ `update_time` as `update_time`,
+ `year` as `year`,
+ `month` as `month`,
+ `day` as `day`
+from `ods`.`t_device`
+where `year` = '${YEAR}'
+ and `month` = '${MONTH}'
+ and `day` = '${DAY}';
+
+-- step=2
+-- source=temp
+-- tableName=t_device__extracted
+-- target=temp
+-- tableName=t_device__joined
+-- writeMode=append
+select
+ `t_device__extracted`.*,
+ case when `t_dim_user`.`user_code` is null then '-1'
+ else `t_dim_user`.`user_code`
+ end as `user_id`
+from `t_device__extracted`
+left join `dim`.`t_dim_user` `t_dim_user`
+ on `t_device__extracted`.`user_id` = `t_dim_user`.`user_code`
+ and `t_device__extracted`.`create_time` >= `t_dim_user`.`start_time`
+ and (`t_device__extracted`.`create_time` < `t_dim_user`.`end_time`
+ or `t_dim_user`.`end_time` is null);
+
+-- step=3
+-- source=temp
+-- tableName=t_device__joined
+-- target=temp
+-- tableName=t_device__target_selected
+-- writeMode=overwrite
+select
+ `device_id`,
+ `manufacturer`,
+ `user_id`,
+ `status`,
+ `online`,
+ `create_time`,
+ `update_time`,
+ `year`,
+ `month`,
+ `day`
+from `t_device__joined`;
+
+-- step=4
+-- source=hive
+-- dbName=dwd
+-- tableName=t_fact_device
+-- target=variables
+select concat('where (',
+ ifEmpty(
+ concat_ws(')\n or (', collect_set(concat_ws(' and ', concat('`year` = ', `year`), concat('`month` = ', `month`), concat('`day` = ', `day`)))),
+ '1 = 1'),
+ ')') as `DWD_UPDATED_PARTITION`
+from (
+ select
+ dwd.*
+ from `dwd`.`t_fact_device` dwd
+ left join `t_device__target_selected` incremental_data on dwd.device_id = incremental_data.device_id
+ where incremental_data.device_id is not null
+ and dwd.is_latest = 1
+);
+
+-- step=5
+-- source=hive
+-- dbName=dwd
+-- tableName=t_fact_device
+-- target=temp
+-- tableName=t_fact_device__changed_partition_view
+select *
+from `dwd`.`t_fact_device`
+${DWD_UPDATED_PARTITION};
+
+-- step=6
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.SCDTransformer
+-- methodName=transform
+-- createTimeField=create_time
+-- dwUpdateType=incremental
+-- dwViewName=t_fact_device__changed_partition_view
+-- odsViewName=t_device__target_selected
+-- partitionField=create_time
+-- partitionFormat=year/month/day
+-- primaryFields=device_id
+-- surrogateField=
+-- timeFormat=yyyy-MM-dd HH:mm:ss
+-- updateTimeField=update_time
+-- transformerType=object
+-- target=hive
+-- dbName=dwd
+-- tableName=t_fact_device
+-- writeMode=overwrite
+```
+
+### One more thing!
+
+The SQL above was actually generated by code, not written by hand! In practice you only need to fill in our Excel [ODS template](https://docs.google.com/spreadsheets/d/1Zn_Q-QUTf6us4RwdgUgBosXL09-D-TowmgwWlDskvlA) or [DWD template](https://docs.google.com/spreadsheets/d/1CetkqBsXj_E8oZBsws9iGdaJB1QJUajnwqH4FoKhXKA) to generate the code, which multiplies development productivity.
+
+
+*Follow-up posts will introduce the built-in data engineering practices (including but not limited to "log-driven" scheduling and graded, classified data quality) as well as Sharp ETL's implementation.*
+
+
diff --git a/website/blog/sharp-etl-introduce-02-beyond-existing-etl.md b/website/blog/sharp-etl-introduce-02-beyond-existing-etl.md
new file mode 100644
index 0000000..9890ff9
--- /dev/null
+++ b/website/blog/sharp-etl-introduce-02-beyond-existing-etl.md
@@ -0,0 +1,139 @@
+---
+slug: sharp-etl-introduce-02-beyond-existing-etl
+title: "Introducing Sharp ETL (Part 2): Beyond Existing ETL"
+tags: [sharp etl, data quality check]
+date: 2022-08-02T00:00:00+08:00
+---
+
+## Introduction
+
+
+This post walks through Sharp ETL's data engineering practices along the following dimensions:
+
+* Composing steps into workflows
+* Extending with custom code
+* Engineered code generation that bakes in uniform, standard data engineering practices
+  * Join checks between fact and dimension tables
+  * Recording unknown and not-applicable values produced during joins
+  * Graded and classified recording of data quality issues
+  * ... ...
+
+
+
+## Organizing job logic with workflows
+
+In software engineering, the way to deal with overly long code is to break the logic into small, understandable units, extract methods, and give those methods descriptive names. That makes code reusable and more readable. When dealing with overly long SQL, the `WITH` clause is the usual tool:
+
+```sql
+WITH query_name1 AS (
+ SELECT ...
+ )
+ , query_name2 AS (
+ SELECT ...
+ FROM query_name1
+ ...
+ )
+SELECT ...
+```
+
+Rewriting with a `WITH` clause undeniably improves readability a great deal, but we believe it is still not enough. SQL split into Sharp ETL steps is more readable still, and easier to debug.
+At first glance SQL organized as a workflow may look longer or more complex, but it is not: each step can carry a name explaining what it does, and SQL organized with `WITH` clauses frequently needs comments for the same purpose anyway. There is also a hidden convenience: if the source is temp, the source line can be omitted, which simplifies things somewhat. And because log-driven execution records the source and target row counts of every step, this is much easier to debug and troubleshoot than a raw `WITH` clause.
+
+
+```sql
+-- step=define query name 1
+-- source=source type xxx
+-- target=temp
+-- tableName=query_name1
+SELECT ...
+
+-- step=define query name 2
+-- target=temp
+-- tableName=query_name2
+SELECT ...
+ FROM query_name1
+
+-- step=output
+-- target=target type xxx
+-- tableName=target table
+SELECT ...
+
+```
+
+## The future of workflows
+
+The first version of workflows executed strictly in order, one step after another. In real use, however, we often need features such as branching, loops, raising exceptions, and error-handling branches. These are all on Sharp ETL's roadmap and will be added incrementally.
+
+## Extending with custom logic via [`Transformer`](https://github.com/SharpData/SharpETL/blob/97f303cbd1f40a29780551851f690c283bcb2061/spark/src/main/scala/com/github/sharpdata/sharpetl/spark/transformation/Transformer.scala)
+
+Let's look at the `Transformer` API first. It provides hooks for inserting custom logic while reading and writing data, and users can easily plug in their own logic by implementing it. You can even extend it by dynamically loading a Scala script file, with no need to rebuild the jar; see [here](/docs/transformer-guide).
+
+```scala
+trait Transformer {
+
+ /**
+ * read
+ */
+ def transform(args: Map[String, String]): DataFrame = ???
+
+ /**
+ * write
+ */
+ def transform(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = ???
+}
+```
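+
+As a concrete illustration, here is a minimal sketch of a read-side transformer. The class and its lookup data are hypothetical, and `SparkSession.active` is used only to keep the sketch self-contained; how the real framework hands a session to a transformer may differ.
+
+```scala
+import com.github.sharpdata.sharpetl.spark.transformation.Transformer
+import org.apache.spark.sql.{DataFrame, SparkSession}
+
+// Hypothetical transformer: expose a small in-memory lookup table as a DataFrame.
+object StatusLookupTransformer extends Transformer {
+  override def transform(args: Map[String, String]): DataFrame = {
+    val spark = SparkSession.active
+    import spark.implicits._
+    // Build the lookup data; a real transformer could call an HTTP API,
+    // read a file, or assemble SQL here instead.
+    Seq((0, "inactive"), (1, "active")).toDF("status_code", "status_name")
+  }
+}
+```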
+
+## Engineered code generation
+
+
+If every feature, data engineering features included, were implemented in fully hand-written SQL, the logic would be very long and complex. To lower the barrier to entry and make the whole process easier, we need a way to encapsulate that complexity. Observing that the ETL for different tables in the same scenario tends to be very similar, we landed on defining templates for data modeling and generating workflows from those templates. You have surely seen workflow generation from an Excel template in the [quick start](/docs/quick-start-guide#generate-sql-files-from-excel-config). In fact, we believe that apart from jobs with truly unusual logic, the vast majority of jobs should be defined through templates and then generated. This has several benefits:
+
+* It avoids the inconsistent, or outright incorrect, data engineering practices that hand-written jobs invite
+* When many jobs need changing, you only change the template content and regenerate
+* As an intermediate artifact everyone understands, the Excel template bridges data BAs and data engineers (DEs), reducing cross-team communication cost and friction
+* Teams stop reinventing the wheel and can get straight to business development on a new project, saving a large infrastructure investment
+
+
+### Grading and classifying data quality issues
+
+Fact/dimension join checks and the recording of unknown and not-applicable values during joins were covered in the previous post; here we focus on grading and classifying data quality issues.
+
+First, look at our data quality check configuration file. Quality rules can be defined with SQL-like expressions or with custom code (a User Defined Rule). Within a rule, `$column` refers to the column the rule is applied to; you can use UDFs, or load a piece of code that implements the check.
+
+```yaml
+- dataCheckType: power null check
+ rule: powerNullCheck($column)
+ errorType: error
+- dataCheckType: null check
+ rule: $column IS NULL
+ errorType: error
+- dataCheckType: duplicated check
+ rule: UDR.com.github.sharpdata.sharpetl.core.quality.udr.DuplicatedCheck
+ errorType: warn
+- dataCheckType: mismatch dim check
+ rule: $column = '-1'
+ errorType: warn
+```
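+
+A minimal sketch of a User Defined Rule is shown below. It is based on the `UserDefinedRule` API visible elsewhere in this repository (a `check` method returning the check SQL and a result view name); the concrete rule, the view naming, and the assumption of a single id column are illustrative only.
+
+```scala
+import com.github.sharpdata.sharpetl.core.quality.{DataQualityConfig, UserDefinedRule}
+
+// Hypothetical rule: flag rows whose checked column is an empty string.
+object EmptyStringCheck extends UserDefinedRule {
+  override def check(tempViewName: String, idColumn: String, udr: DataQualityConfig): (String, String) = {
+    // The framework later reads the failing ids from this view.
+    val resultViewName = s"${tempViewName}__empty_check__${udr.column}"
+    val sql =
+      s"""|CREATE TEMPORARY VIEW $resultViewName AS
+          |SELECT `$idColumn` AS id
+          |FROM `$tempViewName`
+          |WHERE trim(`${udr.column}`) = ''
+          |""".stripMargin
+    (sql, resultViewName)
+  }
+}
+```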
+
+Quality rules defined in the configuration file are shared globally. Wherever a check is needed, reference an existing rule with `column.<column name>.qualityCheckRules=<rule name>`:
+
+```sql
+-- step=1
+-- source=hive
+-- dbName=ods
+-- tableName=t_device
+-- options
+-- idColumn=order_id
+-- column.device_id.qualityCheckRules=power null check
+-- column.status.qualityCheckRules=empty check
+-- target=temp
+-- tableName=t_device__extracted
+-- writeMode=overwrite
+
+select ....
+```
+
+Quality rules support both error- and warn-level issues. Warn-level issues are usually acceptable, whereas error-level issues are not considered acceptable and typically never enter the warehouse. Both levels are recorded, graded and classified, for later troubleshooting or for notifications that prompt a fix.
+
+
+*The next post covers the logic and implementation of log-driven scheduling in detail.*
\ No newline at end of file
diff --git a/website/blog/sharp-etl-introduce-03-when-data-engineering-meets-chaos-engineering.md b/website/blog/sharp-etl-introduce-03-when-data-engineering-meets-chaos-engineering.md
new file mode 100644
index 0000000..357bbff
--- /dev/null
+++ b/website/blog/sharp-etl-introduce-03-when-data-engineering-meets-chaos-engineering.md
@@ -0,0 +1,102 @@
+---
+slug: sharp-etl-introduce-03-when-data-engineering-meets-chaos-engineering
+title: "Introducing Sharp ETL (Part 3): When Data Engineering Meets Chaos Engineering"
+tags: [sharp etl, log driven, chaos engineering, data engineering]
+date: 2022-08-03T00:00:00+08:00
+---
+
+## Introduction
+
+This post motivates the necessity of "log-driven" scheduling through a thought experiment: what would chaos engineering look like in the data engineering domain?
+
+
+
+## What is chaos engineering?
+
+Chaos engineering is the discipline of experimenting on a system in order to build confidence in the system's capability to withstand turbulent conditions in production. ([Principles of Chaos Engineering](https://principlesofchaos.org/zh/))
+
+Modern systems keep getting more complex: from single-threaded to multi-threaded, from monoliths to microservices, from single nodes to high availability, from on-premises to the cloud... Complexity means a program needs ever more preconditions to produce the expected result, and each precondition has its own probability of success. Even when every individual probability is high, Murphy's law says you will, sooner or later, hit an unpredictable outcome.
+
+Chaos engineering uncovers the hidden "chaos" in a system through four steps:
+
+    First, define "steady state" in terms of some measurable output of the system under normal behavior.
+    Second, hypothesize that this steady state will continue in both the control group and the experimental group.
+    Then, introduce variables into the experimental group that reflect real-world events, such as server crashes, hard disk failures, and severed network connections.
+    Finally, try to disprove the steady-state hypothesis from the difference in state between the control group and the experimental group.
+
+A chaos engineering experiment injects faults into an existing system to expose its weak points, so that the robustness of the whole system can be improved in a targeted way.
+
+## "Our environment is highly available (HA), surely it's robust"
+
+Some readers may object: "our environment is all highly available (HA), surely it's robust." The question needs unpacking. A robust system faithfully executes the program's logic and produces the final result; but what if the program logic itself is wrong? Or the logic is correct, yet because of its own fragility the system skips some necessary handling while recovering from an error, corrupting the final result. System-level high availability or robustness and program-level robustness are two different things; they must be considered separately, and neither can be dispensed with. In data engineering practice we tend to focus on whether the infrastructure is highly available while ignoring the robustness of the program logic.
+
+
+## Is idempotency alone enough?
+
+Some people shrug this off: "my program is idempotent, if it fails I just rerun it." Let's first state the definition of idempotency: the ability to execute repeatedly with the same parameters and always obtain the same result.
+
+Now let's simulate a scenario following the four steps of chaos engineering:
+
+1. Define "steady state" in terms of measurable outputs under normal behavior; here we define it as:
+    1. ETL results are stable and correct (idempotent)
+    2. Resource queue usage is reasonable; submitted jobs run without waiting long
+    3. Past data is not recomputed
+    4. No duplicate computation happens at the same moment
+2. Hypothesize that this steady state continues in both the control group and the experimental group.
+3. Introduce a variable reflecting a real-world event into the experimental group: the page freezes while a job is being triggered manually, the operator habitually clicks a few more times, and after refreshing discovers a dozen-plus job instances have been scheduled.
+4. Disprove the steady-state hypothesis from the difference between the two groups:
+    * The resource queue is saturated and newly scheduled jobs have to wait in line (violates steady-state condition 2)
+    * Data already processed in the past is rerun and overwritten (wasted compute, violates condition 3)
+    * The same day's data is computed dozens of times with only the last copy kept (wasted compute, violates condition 4)
+
+The simulated experiment shows that although the ETL is idempotent (satisfying steady-state condition 1), it fails the other steady-state conditions, so we can say it is fragile.
+
+## Other problems with conventional job scheduling
+
+
+
+Ideally, a daily job succeeds every day, but in practice failures are inevitable, and different scheduling engines handle the failure cases differently.
+Take, as an example, an engine that ignores past failures and simply opens the next scheduling cycle: the 2022-02-02 run is skipped and the 2022-02-03 run proceeds. Once the problem is noticed, the data must be backfilled by hand, rerunning the 2022-02-02 job manually. (If any report consumed the 2022-02-02 data, that report was certainly inaccurate.)
+
+Drawback: this depends entirely on the scheduler's job history. Without a failure notification mechanism, you have to inspect jobs one by one to find which one died. If jobs after a failed one are allowed to proceed, order-sensitive scenarios can break: for example, the Feb 3 data is upserted into MySQL, and then the Feb 2 data is upserted afterwards (once the failure is fixed), overwriting newer data with older data.
+
+**We do not deny that some scheduling frameworks, properly configured, can solve the problems above. But nobody can guarantee that every scheduling framework solves them, or that your project is free to choose its scheduler. In practice, we therefore need a "log-driven" isolation layer between ETL jobs and the actual scheduling framework (such as Airflow). This uniform layer of logic makes ETL jobs behave consistently, and as everyone expects, across different schedulers, instead of requiring everyone to master every common scheduler.**
+
+
+## So what do we do?
+
+For this problem, I consider "log decoupling" the necessary solution. Looking back at the issues above: how should duplicate submissions be handled? We need to determine whether an identical job has already been submitted, right? And to stay isolated from any particular scheduling system, the natural design is a separate place to store these scheduling logs, so that at scheduling time we can check whether a run is actually needed (is the same job already running, has this job already run, and so on).
+
+Note that the "log" here is not the job's execution log but the scheduling log: which job was scheduled at what time, with what status, and so on.
+
+The outcome of "log decoupling" is that job execution is driven by the log. This "log-driven" idea closely resembles resumable downloads, except it is applied to job scheduling rather than file transfer. Log-driven scheduling has a few core points (a minimal sketch of a duplicate-schedule guard follows this list):
+
+* Record the job run history yourself instead of relying on the scheduler's features; this unbinds you from any particular scheduling framework;
+* Scheduling is ordered: if the previous cycle failed, it is not skipped in favor of the next cycle; each scheduling attempt first reruns the earlier failed run until it succeeds;
+
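+A minimal sketch of such a guard, in Scala, is below. The `JobLogEntry` record and the in-memory lookup are illustrative only, not the framework's real job-log API; the real implementation queries the `job_log` table described in the next post.
+
+```scala
+import java.time.LocalDateTime
+
+// Illustrative job-log record; the real job_log table carries more columns.
+final case class JobLogEntry(jobName: String,
+                             status: String, // RUNNING / SUCCESS / FAILURE
+                             dataRangeEnd: String,
+                             startTime: LocalDateTime)
+
+object ScheduleGuard {
+  // Refuse to start a new run while another instance of the same workflow
+  // is still marked RUNNING in the log.
+  def canSchedule(jobName: String, log: Seq[JobLogEntry]): Boolean =
+    !log.exists(e => e.jobName == jobName && e.status == "RUNNING")
+}
+```
+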
+**Log-driven scheduling also brings several benefits**:
+
+* It solves duplicate scheduling: when a run starts and finds the same job already running or already run, it can simply exit, or kill the earlier run
+* Backfilling becomes easier to implement, more flexible, and less error-prone
+* More flexible job dependency configuration (upstream and downstream jobs need not share a frequency or live in the same DAG)
+* More flexible starting points for scheduling, e.g. support for Kafka offsets and auto-increment primary keys
+* More uniform and easier operations (different projects and different scheduling engines can all be operated through the log-driven tables)
+* Richer structured job state, such as how many rows were read and written (structured records, as opposed to plain execution logs), convenient for statistics
+* A choice of transaction level, i.e. users can decide whether "dirty reads" of data are acceptable
+* Other benefits that space does not permit expanding on here
+
+
+
+Back in the real world, job failures roughly fall into two kinds:
+
+* Those a retry can fix (transient network failures, queue timeouts, and the like)
+* Those with code or environment problems that need human intervention
+
+For failures a retry can fix, the next scheduling cycle usually backfills the failed run's data automatically; if you don't want to wait for the next cycle, trigger a manual run immediately.
+
+For failures retries cannot fix, every scheduling attempt will fail, but only the earliest pending day is attempted: since its predecessors never succeeded, the scheduler simply keeps retrying the 2022-02-02 run each day.
+Such failures still require human intervention; once the environment, logic, or upstream data is fixed, the missed runs are backfilled automatically and in order.
+
+## Summary
+
+The thought experiment borrowed from chaos engineering shows that conventional job scheduling is not stable enough, and that adding log-driven scheduling makes ETL jobs more stable. Log-driven scheduling also brings many benefits beyond fixing the stability problems chaos engineering exposes.
diff --git a/website/blog/sharp-etl-introduce-04-log-driven-implementation.md b/website/blog/sharp-etl-introduce-04-log-driven-implementation.md
new file mode 100644
index 0000000..6d15ee0
--- /dev/null
+++ b/website/blog/sharp-etl-introduce-04-log-driven-implementation.md
@@ -0,0 +1,96 @@
+---
+slug: sharp-etl-introduce-04-log-driven-implementation
+title: "Introducing Sharp ETL (Part 4): The Log-Driven Implementation"
+tags: [sharp etl, log driven]
+date: 2022-08-04T00:00:00+08:00
+---
+
+## Introduction
+
+This post details the logic and implementation details of log-driven scheduling.
+
+
+
+## The log-driven execution flow
+
+
+
+## Computing the pending-execution queue
+
+* [time-based](https://github.com/SharpData/SharpETL/blob/97f303cbd1f40a29780551851f690c283bcb2061/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/LogDrivenInterpreter.scala#L189)
+
+  Covers incremental and full loads from databases, APIs, and files.
+
+  `dataRangeStart` is the maximum of the start time given by the `--default-start` option and the `dataRangeEnd` of the last successful run;
+  the frequency comes from the `--period` option. `dataRangeEnd` is computed from these two values; the simplified rule (see the Scala sketch after this list):
+
+  number of runs to schedule = (current time - `dataRangeStart`) / `period` (rounded down)
+
+  For a job that has never run, suppose `--default-start` is 20220101000000 and `--period` (in minutes) is 1440, i.e. the job runs once a day starting from 2022-01-01 00:00:00. If the current time is 2022-01-05 14:00:00, four runs get scheduled (20220101000000-20220102000000, 20220102000000-20220103000000, 20220103000000-20220104000000, 20220104000000-20220105000000).
+
+  For a job that has run before, `--default-start` was set to 20220101000000 but the job has been running for a while. If the maximum `dataRangeEnd` of the job so far is 20220107000000 and the current time is 2022-01-08 14:00:00, one run gets scheduled (20220107000000-20220108000000).
+
+* [auto-incremental primary key](https://github.com/SharpData/SharpETL/blob/97f303cbd1f40a29780551851f690c283bcb2061/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/LogDrivenInterpreter.scala#L148)
+
+  For the database auto-increment primary key scenario.
+  `dataRangeStart` is the maximum of the starting key value from the `--default-start` option (default: 0) and the `dataRangeEnd` of the last successful run;
+  `dataRangeEnd` is `max(primary key)`. When the run finishes, the maximum key value it actually read is written back to `dataRangeEnd`.
+
+* [kafka topic](https://github.com/SharpData/SharpETL/blob/97f303cbd1f40a29780551851f690c283bcb2061/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/LogDrivenInterpreter.scala#L95)
+
+  Similar to auto-increment primary keys:
+  `dataRangeStart` is the maximum of the starting offset from the `--default-start` option (default: `earliest`) and the `dataRangeEnd` of the last successful run;
+  `dataRangeEnd` is `latest`. When the run finishes, the maximum offset it actually read is written back to `dataRangeEnd`.
+
+* [upstream](https://github.com/SharpData/SharpETL/blob/97f303cbd1f40a29780551851f690c283bcb2061/core/src/main/scala/com/github/sharpdata/sharpetl/core/api/LogDrivenInterpreter.scala#L173)
+
+  Upstream-driven jobs are a little special: they usually belong to the layers after the operational data store (ODS). For example, a new job in the detail layer (DWD) naturally has a dependency on the ODS job. Here we would rather not configure the DWD job separately; we want it to follow the ODS job's configuration completely, so that whenever the ODS job has run, the DWD job runs automatically.
+  This brings an extra benefit: if the ODS data needs to be rebuilt, a timewindow-based DWD job would need reconfiguring, but an upstream-driven DWD job needs none. Once ODS is rebuilt, DWD is rebuilt along with it; the two stay perfectly in sync, which greatly reduces the difficulty of configuring job dependencies.
+
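+A sketch of the time-based window computation above, in Scala. Real Sharp ETL works with string timestamps such as `20220101000000`; plain `LocalDateTime` is used here, and the function name and signature are illustrative rather than the framework's actual API.
+
+```scala
+import java.time.LocalDateTime
+import java.time.temporal.ChronoUnit
+
+// Compute the (dataRangeStart, dataRangeEnd) windows still pending for a job.
+def pendingWindows(defaultStart: LocalDateTime,
+                   lastRangeEnd: Option[LocalDateTime],
+                   periodMinutes: Long,
+                   now: LocalDateTime): Seq[(LocalDateTime, LocalDateTime)] = {
+  // dataRangeStart = max(--default-start, last successful dataRangeEnd)
+  val start = lastRangeEnd.filter(_.isAfter(defaultStart)).getOrElse(defaultStart)
+  // number of runs to schedule = (now - start) / period, rounded down
+  val runs = ChronoUnit.MINUTES.between(start, now) / periodMinutes
+  (0L until runs).map { i =>
+    (start.plusMinutes(i * periodMinutes), start.plusMinutes((i + 1) * periodMinutes))
+  }
+}
+```
+
+With `defaultStart` at 2022-01-01 00:00:00, no prior run, a 1440-minute period, and `now` at 2022-01-05 14:00:00, this yields the same four daily windows listed in the first example above.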
+
+## Table design for log-driven scheduling
+
+The current log-driven implementation mainly uses two tables, `job_log` and `step_log`:
+
+```sql
+create table job_log
+(
+ job_id bigint auto_increment primary key,
+ workflow_name varchar(128) charset utf8 not null,
+ `period` int not null,
+ job_name varchar(128) charset utf8 not null,
+ data_range_start varchar(128) charset utf8 null,
+ data_range_end varchar(128) charset utf8 null,
+ job_start_time datetime null,
+ job_end_time datetime null,
+ status varchar(32) charset utf8 not null comment 'job status: SUCCESS,FAILURE,RUNNING',
+ create_time datetime default CURRENT_TIMESTAMP not null comment 'log create time',
+ last_update_time datetime default CURRENT_TIMESTAMP not null comment 'log update time',
+ load_type varchar(32) null,
+ log_driven_type varchar(32) null,
+ file text charset utf8 null,
+ application_id varchar(64) charset utf8 null,
+ project_name varchar(64) charset utf8 null,
+ runtime_args text charset utf8 null
+) charset = utf8;
+
+create table step_log
+(
+ job_id bigint not null,
+ step_id varchar(64) not null,
+ status varchar(32) not null,
+ start_time datetime not null,
+ end_time datetime null,
+ duration int(11) unsigned not null,
+ output text not null,
+ source_count bigint null,
+ target_count bigint null,
+ success_count bigint null comment 'success data count',
+ failure_count bigint null comment 'failure data count',
+ error text null,
+ source_type varchar(32) null,
+ target_type varchar(32) null,
+ primary key (job_id, step_id)
+) charset = utf8;
+
+```
\ No newline at end of file
diff --git a/website/blog/sharp-etl-introduce-05-workflow-in-a-glance.md b/website/blog/sharp-etl-introduce-05-workflow-in-a-glance.md
new file mode 100644
index 0000000..46b21f6
--- /dev/null
+++ b/website/blog/sharp-etl-introduce-05-workflow-in-a-glance.md
@@ -0,0 +1,132 @@
+---
+slug: sharp-etl-introduce-05-workflow-in-a-glance
+title: "Introducing Sharp ETL (Part 5): Workflows at a Glance"
+tags: [sharp etl, workflow]
+date: 2022-08-05T00:00:00+08:00
+---
+
+## Introduction
+
+
+This post is a quick tour of the basics of workflows, including
+
+* Variables
+* Temp tables
+* Control flow (workflow_spec.sql)
+* Reading and writing data in steps
+* Data sources
+* Extensions
+  * UDF
+  * Transformer
+  * Custom data sources
+
+
+
+## Variables
+
+Sharp ETL has rich variable support, covering both built-in runtime variables and user-defined ones.
+
+Built-in variables include
+
+* `${DATA_RANGE_START}`
+* `${DATA_RANGE_END}`
+* `${JOB_ID}`
+* `${JOB_NAME}`
+* `${WORKFLOW_NAME}`
+
+and, for timewindow jobs, additionally
+
+* `${YEAR}`
+* `${MONTH}`
+* `${DAY}`
+* `${HOUR}`
+* `${MINUTE}`
+
+Users can add or override variables in any step; once declared, a variable is available to all subsequent steps. For example:
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+select from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy') as `YEAR`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'MM') as `MONTH`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'dd') as `DAY`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR`,
+ 'temp_source' as `sources`,
+ 'temp_target' as `target`,
+ 'temp_end' as `end`
+```
+
+## Temp tables
+
+Temp tables are what lets Sharp ETL break a complex job apart; the current Spark implementation uses Spark temporary views. A complex piece of logic can be simplified by splitting it into outputs to several temp tables. For a step that reads from a temp table, the source line can be omitted: with no source given, the step defaults to reading from temp.
+
+```sql
+-- step=1
+-- target=temp
+-- tableName=temp_table
+select 'SUCCESS' as `RESULT`;
+
+-- step=2
+-- target=console
+select * from temp_table;
+```
+
+
+
+## Data sources
+
+Every step has a source and a target configuration; see [Datasource](/docs/datasource) for the details of each. Data sources can be combined freely within one workflow with no hard restrictions, and users can easily define new data sources of their own.
+
+
+## Extensions
+
+Sharp ETL has been designed from the start so that users can extend it easily: step design, transformer design, UDFs, dynamically loaded transformer scripts, custom data quality rules, and custom data quality check scripts all support user-defined logic well. Control flow such as branching, loops, raising exceptions, and error-handling branches will follow, making the Sharp ETL workflow ever more like a programming language, so that users can rely entirely on SQL to implement everything.
+
+### UDF
+
+Users can add UDF support by building their own jar. The jar does not need to be built on top of Sharp ETL; plain Scala functions are enough.
+
+For example, to register a new UDF with custom logic, just write ordinary code:
+
+```scala
+class TestUdfObj extends Serializable {
+ def testUdf(value: String): String = {
+ s"$value-proceed-by-udf"
+ }
+}
+```
+
+After packaging, submit the jar together with the Sharp ETL jar, and the UDF can be referenced easily.
+
+```bash
+spark-submit --class com.github.sharpdata.sharpetl.spark.Entrypoint spark/build/libs/spark-1.0.0-SNAPSHOT.jar /path/to/your-udf.jar ... ...
+```
+
+
+```sql
+-- step=1
+-- source=class
+-- className=com.github.sharpdata.sharpetl.spark.end2end.TestUdfObj
+-- target=udf
+-- methodName=testUdf
+-- udfName=test_udf
+
+-- step=2
+-- source=temp
+-- target=temp
+-- tableName=udf_result
+select test_udf('input') as `result`;
+```
+
+### Transformer
+
+For details on transformers, see [Transformer](/docs/transformer-guide).
+
+### Custom data sources
+
+For details on custom data sources, see [Custom data sources](/docs/custom-datasource-guide).
+
+### Configuration
+
+For Sharp ETL configuration options, Spark conf settings, and system connection settings, see [Properties file config](/docs/properties-file-config).
\ No newline at end of file
diff --git a/website/docs/UDF-guide.md b/website/docs/UDF-guide.md
new file mode 100644
index 0000000..d9d0f70
--- /dev/null
+++ b/website/docs/UDF-guide.md
@@ -0,0 +1,41 @@
+---
+title: "UDF guide"
+sidebar_position: 10
+toc: true
+last_modified_at: 2021-12-23T18:25:57-04:00
+---
+
+## UDF
+
+Users can add UDF support by building their own jar. The jar does not need to be built on top of Sharp ETL; plain Scala functions are enough.
+
+For example, to register a new UDF with custom logic, just write ordinary code:
+
+```scala
+class TestUdfObj extends Serializable {
+ def testUdf(value: String): String = {
+ s"$value-proceed-by-udf"
+ }
+}
+```
+
+After packaging, submit the jar together with the Sharp ETL jar, and the UDF can be referenced easily.
+
+```bash
+spark-submit --class com.github.sharpdata.sharpetl.spark.Entrypoint spark/build/libs/spark-1.0.0-SNAPSHOT.jar /path/to/your-udf.jar ... ...
+```
+
+```sql
+-- step=1
+-- source=class
+-- className=com.github.sharpdata.sharpetl.spark.end2end.TestUdfObj
+-- target=udf
+-- methodName=testUdf
+-- udfName=test_udf
+
+-- step=2
+-- source=temp
+-- target=temp
+-- tableName=udf_result
+select test_udf('input') as `result`;
+```
diff --git a/website/docs/batch-job-guide.md b/website/docs/batch-job-guide.md
new file mode 100644
index 0000000..a494b26
--- /dev/null
+++ b/website/docs/batch-job-guide.md
@@ -0,0 +1,67 @@
+This guide is a quick reference for the `batch-job` command line.
+
+## Introduction
+
+The `batch-job` command runs all jobs in a batch on each invocation and is passed as one of the arguments when running a job. For example, to run a sample job, the command is as follows:
+
+```bash
+# run all job in batch by `spark-submit`
+spark-submit --class com.github.sharpdata.sharpetl.spark.Entrypoint spark/build/libs/spark-1.0.0-SNAPSHOT.jar batch-job -f ~/Desktop/sharp-etl-Quick-Start-Guide.xlsx --default-start-time="2021-09-30 00:00:00" --local --once
+
+# run all job locally
+./gradlew :spark:run --args="batch-job -f ~/Desktop/sharp-etl-Quick-Start-Guide.xlsx --default-start-time='2021-09-30 00:00:00' --local --once"
+```
+
+## Parameters
+
+### common command params
+
+1. `--local`
+
+Declare that the job runs in standalone mode. If `--local` is not provided, the job will try to run with Hive support enabled.
+
+2. `--release-resource`
+
+Automatically close the Spark session after the job completes.
+
+3. `--skip-running`
+
+After a flash crash, use `--skip-running` to mark the last job (stuck in the RUNNING state) as failed and start a new one.
+
+4. `--default-start` / `--default-start-time`
+
+Specify the default start time (e.g. 20210101000000) or incremental id for this job. If the command is run for the first time, this argument sets the start; on subsequent runs it has no effect.
+
+5. `--once`
+
+Run the job only once (for testing).
+
+6. `--env`
+
+Specify the environment (local/test/dev/qa/prod) in which to run the job.
+
+7. `--property`
+
+Use a specific properties file, e.g. `--property=hdfs:///user/admin/etl-conf/etl.properties`.
+
+8. `--override`
+
+Override config values from the properties file, e.g. `--override=etl.workflow.path=hdfs:///user/hive/sharp-etl,a=b,c=d`.
+
+### batch-job params
+
+1. `--names`
+
+Specify the names of the jobs to run.
+
+2. `-f` / `--file`
+
+Specify the Excel file to run.
+
+3. `--period`
+
+Specify the period of job execution.
+
+4. `-h` / `--help`
+
+Print parameter usage; its default value is false.
diff --git a/website/docs/custom-datasource-guide.md b/website/docs/custom-datasource-guide.md
new file mode 100644
index 0000000..c7df7e6
--- /dev/null
+++ b/website/docs/custom-datasource-guide.md
@@ -0,0 +1,93 @@
+---
+title: "custom-datasource-guide"
+sidebar_position: 10
+toc: true
+last_modified_at: 2021-12-23T18:25:57-04:00
+---
+
+## Custom data sources
+
+Implementing a custom data source involves two APIs: [Source](https://github.com/SharpData/SharpETL/blob/main/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/Source.scala) and [Sink](https://github.com/SharpData/SharpETL/blob/main/core/src/main/scala/com/github/sharpdata/sharpetl/core/datasource/Sink.scala).
+
+```scala
+trait Source[DataFrame, Context] extends Serializable {
+ def read(step: WorkflowStep, jobLog: JobLog, executionContext: Context, variables: Variables): DataFrame
+}
+
+trait Sink[DataFrame] extends Serializable {
+ def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit
+}
+```
+
+:::tip
+Data sources that speak JDBC need no extra implementation: just provide the data source's JDBC driver jar when submitting the job and configure the connection in the properties file. For example, JDBC support for informix requires:
+
+```properties
+sysmaster.informix.url=jdbc:informix-sqli://localhost:9088/sysmaster:INFORMIXSERVER=informix;DELIMIDENT=Y
+sysmaster.informix.user=informix
+sysmaster.informix.password=in4mix
+sysmaster.informix.driver=com.informix.jdbc.IfxDriver
+sysmaster.informix.fetchsize=100
+```
+:::
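+
+With the connection above in place, a step can reference the source like any other JDBC data source. A minimal sketch, assuming the `sysmaster` prefix configured above (`systables` is Informix's built-in catalog table):
+
+```sql
+-- step=1
+-- source=informix
+-- dbName=sysmaster
+-- target=console
+select *
+from systables;
+```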
+
+## Extending with a new data source: Databricks Delta as an example
+
+### Create a config class
+
+Since Delta can be viewed as a table structure, `DBDataSourceConfig` is reused here; you can also add new config fields as your situation requires.
+
+
+```scala
+import com.github.sharpdata.sharpetl.core.annotation.configFor
+
+@configFor(types = Array("delta_lake"))
+class DeltaLakeDataSourceConfig extends DBDataSourceConfig {
+ @BeanProperty
+ var yourConfig: String = _
+}
+```
+
+:::tip
+Note that `@configFor` registers the new data source type; once registered, it can be used as a source or target value in workflows.
+:::
+
+### Implement the Source & Sink APIs
+
+The code below is somewhat simplified for readability.
+
+```scala
+import com.github.sharpdata.sharpetl.core.api.Variables
+import com.github.sharpdata.sharpetl.core.datasource.{Sink, Source}
+import com.github.sharpdata.sharpetl.core.datasource.config.DBDataSourceConfig
+import com.github.sharpdata.sharpetl.core.repository.model.JobLog
+import com.github.sharpdata.sharpetl.core.syntax.WorkflowStep
+import com.github.sharpdata.sharpetl.core.annotation._
+import com.github.sharpdata.sharpetl.core.util.ETLLogger
+import org.apache.spark.sql.{DataFrame, SparkSession}
+
+@source(types = Array("delta_lake"))
+@sink(types = Array("delta_lake"))
+class DeltaLakeDataSource extends Source[DataFrame, SparkSession] with Sink[DataFrame] {
+
+  // `deltaLakeBasePath` and `targetPath` are assumed to be resolved from config elsewhere (simplified here)
+  override def read(step: WorkflowStep, jobLog: JobLog, executionContext: SparkSession, variables: Variables): DataFrame = {
+    executionContext
+      .read
+      .format("delta")
+      .load(s"$deltaLakeBasePath/${step.source.asInstanceOf[DBDataSourceConfig].tableName}")
+  }
+
+  override def write(df: DataFrame, step: WorkflowStep, variables: Variables): Unit = {
+    df
+      .write
+      .format("delta")
+      .mode(step.getWriteMode)
+      .save(targetPath)
+  }
+}
+
+```
+
+:::tip
+Note that `@source` and `@sink` associate the class with data source types. One DataSource may be associated with several types at once (as long as the implementation is shared), and it may also implement only the Source or only the Sink API.
+:::
\ No newline at end of file
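+
+Once registered, the new type can be used in a workflow like any built-in one. A minimal sketch, assuming a Delta table named `my_table`:
+
+```sql
+-- step=1
+-- source=delta_lake
+-- tableName=my_table
+-- target=console
+select *
+from my_table;
+```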
diff --git a/website/docs/datasource.md b/website/docs/datasource.md
new file mode 100644
index 0000000..dc0e987
--- /dev/null
+++ b/website/docs/datasource.md
@@ -0,0 +1,1041 @@
+---
+title: "Datasource"
+sidebar_position: 1
+toc: true
+last_modified_at: 2022-08-05T18:25:57-04:00
+---
+
+## Supported data sources
+
+## Read
+### hive
+
+ This step executes hive sql directly through the SparkSession (with Hive support enabled) and returns a DataFrame for later operations.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | -------------- |
+ | dataSourceType | none | No | Allowed value: hive. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=hive
+ -- target=console
+ select *
+ from table1;
+ ```
+
+### temp
+
+ This step executes sql directly through the SparkSession (Hive support not required) and returns a DataFrame for later operations.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | -------------- |
+ | dataSourceType | none | No | Allowed value: temp. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=temp
+ -- target=temp
+ -- tableName=temp_table
+ select 1 as a;
+
+ -- step=2
+ -- source=temp
+ -- target=console
+ select *
+ from temp_table;
+ ```
+
+### jdbc
+
+ - mysql
+ - oracle
+ - postgres
+ - ms_sql_server
+ - impala
+ - informix
+
+
+ The jdbc-type operations support essentially the same options; only the underlying data source differs.
+
+ jdbc operations need basic connection settings in application.properties, in the form `${dbName}.${dataSourceType}.*`, for example:
+
+ ```properties
+ test.postgres.driver=org.postgresql.Driver
+ test.postgres.fetchsize=10
+ test.postgres.url=jdbc:postgresql://localhost:5432/default?currentSchema="default"
+ test.postgres.user=root
+ test.postgres.password=root
+ ```
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | --------------- | ------ | -------- | ------------------------------------------------------------ |
+ | dataSourceType | none | No | Allowed values: mysql, oracle, postgres, ms_sql_server, impala. |
+ | dbName | none | Yes | If dbName is specified, connection settings in application.properties starting with that dbName are used to read data; if omitted, no prefix is used. |
+ | numPartitions | none | Yes | Number of parallel sub-queries a large query is split into. |
+ | partitionColumn | none | Yes | Column used to split the query. |
+ | lowerBound | none | Yes | Minimum value of the split column (Spark 2.3 supports numeric types only). |
+ | upperBound | none | Yes | Maximum value of the split column (Spark 2.3 supports numeric types only). |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=postgres
+ -- dbName=test
+ -- target=variables
+ select cast(min(id) as int8) as lowerBound,
+ cast(max(id) as int8) as upperBound
+ from test_table;
+
+ -- step=2
+ -- source=postgres
+ -- dbName=test
+ -- numPartitions=10000
+ -- lowerBound=${lowerBound}
+ -- upperBound=${upperBound}
+ -- partitionColumn=id
+ -- target=hive
+ -- tableName=ods_test_table
+ -- writeMode=overwrite
+ select *
+ from test_table;
+ ```
+
+### kudu
+
+ This step reads the kudu table through kudu spark and then applies the selectSql to the loaded data (basic query predicates are pushed down to kudu).
+
+ Reading kudu tables requires `kudu.master` in application.properties, for example:
+
+ ```properties
+ kudu.master=localhost:7051
+ ```
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | -------------- |
+ | dataSourceType | none | No | Allowed value: kudu. |
+ | dbName | none | No | kudu database name. |
+ | tableName | none | No | kudu table name. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=kudu
+ -- dbName=test_db
+ -- tableName=test_table
+ -- target=console
+ select *
+ from test_table;
+ ```
+
+### impala_kudu
+
+ impala_kudu works almost the same as kudu: both operate on data through kudu spark. The difference is that impala_kudu targets kudu tables managed by impala. Because impala prefixes table names at creation time, a table referenced as 'test_db.test_table' inside impala may be named `impala::test_db.test_table` when accessed through kudu directly.
+
+ Therefore an extra `kudu.table.prefix` setting is required in application.properties.
+
+ ```properties
+ kudu.master=localhost:7051
+ kudu.table.prefix=impala::
+ ```
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | --------------------- |
+ | dataSourceType | none | No | Allowed value: impala_kudu. |
+ | dbName | none | No | kudu database name. |
+ | tableName | none | No | kudu table name. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=impala_kudu
+ -- dbName=test_db
+ -- tableName=test_table
+ -- target=console
+ select *
+ from test_table;
+ ```
+
+### File transfer
+
+ This step only copies files and does not parse their contents (no spark involved). Currently only copying files to hdfs is supported.
+
+ File-transfer operations share essentially the same configuration; only the data source differs.
+
+ They need server connection settings in application.properties, in the form `${configPrefix}.${dataSourceType}.*`, for example:
+
+ ```properties
+ test.ftp.host=localhost
+ test.ftp.port=21
+ test.ftp.user=root
+ test.ftp.password=root
+ test.ftp.dir=/test
+ test.ftp.localTempDir=/tmp/ftp/test
+ test.ftp.hdfsTempDir=/tmp/ftp/test
+
+ test.scp.host=localhost
+ test.scp.port=22
+ test.scp.user=root
+ test.scp.password=root
+ test.scp.dir=/test
+ test.scp.localTempDir=/tmp/scp/test
+ test.scp.hdfsTempDir=/tmp/scp/test
+
+ test.sftp.host=localhost
+ test.sftp.port=22
+ test.sftp.user=root
+ test.sftp.password=root
+ test.scp.proxy.host=localhost
+ test.scp.proxy.port=22
+ ```
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | --------------- | ------ | -------- | ------------------------------------------------------------ |
+ | dataSourceType | none | No | Allowed values: scp, ftp, hdfs, sftp, mount. |
+ | configPrefix | none | No | Connection config prefix of the system the files are read from. |
+ | fileDir | none | Yes | If `${configPrefix}.${dataSourceType}.dir` is already set in application.properties, `fileDir` can be omitted here; if both are given, the directory from `fileDir` takes precedence. |
+ | fileNamePattern | .* | No | File-matching regex in three groups: "(prefix)(file name body)(suffix)". Usually empty parentheses for the prefix and suffix are enough. Occasionally a marker file in the same directory signals that a file has been fully written: with the regex `()(a\.txt)(\.OK)`, `a.txt` alone is not read, and is only read once `a.txt.OK` also exists. |
+ | deleteSource | false | No | Whether to delete the source file after the copy completes; defaults to false (keep it). |
+ | decompress | false | No | Whether to decompress before uploading to hdfs; defaults to false (no decompression). |
+ | codecExtension | none | Yes | Compression suffix (e.g. gz, zip) when decompression before upload is needed. Empty by default (no decompression). |
+ | onlyOneName | false | No | Whether the file name never changes. If it never changes, every run of this step re-reads the same file; otherwise the file is not read again after the first successful read. |
+
+ - scp
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=scp
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test\..+)(OK)
+ -- deleteSource=false
+ -- target=hdfs
+ -- configPrefix=test
+ -- writeMode=overwrite
+ ```
+
+ - ftp
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=ftp
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test\..+)(OK)
+ -- deleteSource=false
+ -- target=hdfs
+ -- configPrefix=test
+ -- writeMode=overwrite
+ ```
+
+ - hdfs
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=hdfs
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test.+)()
+ -- target=ftp
+ -- configPrefix=test
+ -- filePath=/test
+ -- writeMode=overwrite
+ ```
+
+ - sftp
+
+ For sftp, matching files are downloaded to the local disk first and then uploaded from there to hdfs.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | --------------- | ------ | -------- | ------------------------------------------------------------ |
+ | sourceDir | none | No | Absolute path on the sftp server |
+ | tempDestination | none | No | Local temp path where the sftp files are staged |
+ | tempDestinationDirPermission | rw-rw---- | Yes | File permission of the temp path |
+ | hdfsDir | none | No | Destination path on hdfs |
+ | filterByTime | false | Yes | true/false. When filtering files on the sftp server, whether to also filter by modification time: if true, besides matching the file-name regex, the modification time must fall within [dataRangeStart, dataRangeEnd) |
+ | timezone | GMT+8 | Yes | Timezone of the server the ETL framework runs on |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=sftp
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test.+)()
+ -- sourceDir=/Distribution/NMBOData
+ -- tempDestinationDir=/data1/ticketflap/tmp
+ -- hdfsDir=hdfs:///data/ticketflap/
+ -- filterByTime=false
+ -- target=ftp
+ -- configPrefix=test
+ -- filePath=/test
+ -- writeMode=overwrite
+ ```
+
+ - mount(sharefolder)
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | --------------- | ------ | -------- | ------------------------------------------------------------ |
+ | sourceDir | none | No | Absolute path of the mounted share folder |
+ | tempDestination | none | No | Local temp path where the files are staged |
+ | tempDestinationDirPermission | rw-rw---- | Yes | File permission of the temp path |
+ | hdfsDir | none | No | Destination path on hdfs |
+ | filterByTime | false | Yes | true/false. When filtering files in the share folder, whether to also filter by modification time: if true, besides matching the file-name regex, the modification time must fall within [dataRangeStart, dataRangeEnd) |
+ | timezone | GMT+8 | Yes | Timezone of the server the ETL framework runs on |
+
+ Example:
+ ```sql
+ -- step=1
+ -- source=mount
+ -- fileNamePattern=[\s\S]*.csv.\w*
+ -- sourceDir=/mnt/ltg/Archive
+ -- tempDestinationDir=/data1/shared_folder/tmp
+ -- hdfsDir=hdfs:///data/ltg
+ -- target=variables
+ -- checkPoint=false
+ -- dateRangeInterval=0
+ ```
+
+### hdfs files
+
+ This step reads and parses file contents, returning a DataFrame for later use.
+
+ The hdfs file steps share a set of common base parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | --------------- | ------ | -------- | ------------------------------------------------------------ |
+ | dataSourceType | none | No | Allowed values: hdfs, json, csv, excel. |
+ | configPrefix | none | No | Connection config prefix of the system the files are read from. |
+ | fileDir | none | Yes | If `${configPrefix}.${dataSourceType}.dir` is already set in application.properties, `fileDir` can be omitted here; if both are given, the directory from `fileDir` takes precedence. |
+ | fileNamePattern | .* | No | File-matching regex in three groups: "(prefix)(file name body)(suffix)". Usually empty parentheses for the prefix and suffix are enough. Occasionally a marker file in the same directory signals that a file has been fully written: with the regex `()(a\.txt)(\.OK)`, `a.txt` alone is not read, and is only read once `a.txt.OK` also exists. |
+ | deleteSource | false | No | Whether to delete the source file after the copy completes; defaults to false (keep it). |
+
+ - hdfs
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | ----------------- | ------ | -------- | ------------------------------------------------------------ |
+ | codecExtension | none | Yes | Compression suffix (e.g. gz, zip) when decompression before upload is needed. Empty by default (no decompression). |
+ | separator | none | Yes | Delimiter used when parsing delimiter-separated text. Configure only one of separator and fieldLengthConfig. |
+ | fieldLengthConfig | none | Yes | Per-field lengths (in bytes) for parsing fixed-width text. Configure only one of separator and fieldLengthConfig. |
+ | strictColumnNum | false | No | Whether the column count must match exactly; defaults to false. E.g. with a 30-column source file and a 20-column target table, non-strict mode simply takes the first 20 columns, while strict mode drops rows whose column count differs. |
+
+ Delimiter-separated text example:
+
+ ```sql
+ -- step=1
+ -- source=hdfs
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test.+\.gz)()
+ -- encoding=utf-8
+ -- codecExtension=.gz
+ -- separator=\|\+\|
+ -- target=hive
+ -- tableName=test_table
+ -- writeMode=overwrite
+ ```
+
+ Fixed-width text example:
+
+ ```sql
+ -- step=1
+ -- source=hdfs
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test.+\.gz)()
+ -- encoding=gbk
+ -- codecExtension=
+ -- fieldLengthConfig=28,10,3,23,23,23
+ -- deleteSource=false
+ -- target=hive
+ -- tableName=test_table
+ -- writeMode=overwrite
+ ```
+
+ - json
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | --------- | ---------- | -------- | ------------------------------------------------------------ |
+ | multiline | false | No | Whether one json document may span multiple lines; defaults to false. Enabling it has a noticeable performance cost, so avoid it unless necessary. |
+ | mode | PERMISSIVE | Yes | Whether to enable strict mode, which strictly validates the field count. Disabled by default. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=json
+ -- configPrefix=test
+ -- fileNamePattern=()(test_\d{14}\.json)()
+ -- multiline=false
+ -- target=temp
+ -- tableName=temp
+ ```
+
+ - csv
+
+ The current implementation is simple; apart from the options below, the defaults hard-coded in the code are used.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | ----------- | ------ | -------- | ------------------------------------------------------------ |
+ | inferSchema | true | No | Whether to infer the schema; defaults to true. |
+ | encoding | utf-8 | No | File encoding; defaults to utf-8. |
+ | sep | , | No | Delimiter; defaults to a comma. |
+ | header | true | No | Whether the file has a header row; defaults to true. |
+ | selectExpr | * | No | Columns to return for each csv, comma-separated. Typical use: when loading several csv files at once, specifying the returned columns avoids errors caused by differing column counts or orders (counts and orders may differ, but all required columns must be present). |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=csv
+ -- configPrefix=test
+ -- fileNamePattern=()(test\.csv)()
+ -- selectExpr=field1,field2
+ -- target=temp
+ -- tableName=temp
+ ```
+
+ - excel
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | ----------------------- | ------------------- | -------- | ------------------------------------------------------------ |
+ | header | true | No | Whether the file has a header row; defaults to true. |
+ | treatEmptyValuesAsNulls | false | No | Whether to convert empty values to null; defaults to false. |
+ | inferSchema | false | No | Whether to infer the schema; defaults to false. |
+ | addColorColumns | false | No | Whether to add extra columns carrying cell colors; defaults to false. |
+ | dataAddress | none | Yes | Data address, defaults to A1. May be set partially: the sheet alone or the start cell alone both work. E.g. 'Sheet2'!A1:D3, '${sheetName}'!${startCell}:${endCell}, ${sheetIndex}!${startCell}:${endCell} |
+ | timestampFormat | MM-dd-yyyy HH:mm:ss | Yes | Defaults to yyyy-mm-dd hh:mm:ss[.fffffffff] |
+ | maxRowsInMemory | none | Yes | Set when reading very large documents; a streaming reader will be used. |
+ | excerptSize | none | Yes | Number of rows used for schema inference; defaults to 10. |
+ | workbookPassword | none | Yes | Workbook password; defaults to null (no password). |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=excel
+ -- configPrefix=test
+ -- fileNamePattern=()(^BA\.xlsx$)()
+ -- dataAddress=0!A1
+ -- onlyOneName=true
+ -- target=console
+ ```
+
+### udf registration
+
+ During read, this step loads the class at the given class path into memory. Plain objects and classes behave the same and can be mixed. Classes that take constructor parameters need code changes to support (pmml is one such case).
+
+ During write, the method is registered as a udf under the given name, valid for the lifetime of the current SparkSession.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ----------------------- |
+ | dataSourceType | none | No | Allowed values: object, class. |
+ | className | none | No | Class path to load. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=object
+ -- className=com.box.datapipeline.udf.DesDecryptUDF
+ -- target=udf
+ -- methodName=desDecryptHex
+ -- udfName=des_decrypt
+
+ -- step=2
+ -- source=hive
+ -- target=hive
+ -- tableName=test_table
+ -- writeMode=overwrite
+ select des_decrypt(trim(a), '11111111', 'GBK') as actnum
+ from test_table;
+ ```
+
+ - object
+
+ - class
+
+ - pmml
+
+ pmml here is a parameterized class loaded via reflection; the pmml file path must be given in the config.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------------------------------------------ |
+ | dataSourceType | none | No | Allowed value: pmml. |
+ | className | none | No | Class path to load: `com.github.sharpdata.sharpetl.spark.udf.PmmlUDF`. |
+ | pmmlFileName | none | No | Name of the model file used for prediction. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=pmml
+ -- className=com.github.sharpdata.sharpetl.spark.udf.PmmlUDF
+ -- pmmlFileName=test_pmml.pmml
+ -- target=udf
+ -- methodName=predict
+ -- udfName=predict
+
+ -- step=2
+ -- source=hive
+ -- target=hive
+ -- tableName=test_table
+ select id,
+ predict(
+ struct(
+ *
+ )
+ ) as result
+ from temp;
+ ```
+
+### bigquery
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ----------------------- |
+ | dataSourceType | none | No | Must be bigquery |
+ | system | none | No | Connection settings are read from bigquery.${system}.* |
+
+ bigquery settings in application.properties:
+
+ ```properties
+ bigquery.test.proxyAddress=localhost:8080
+ bigquery.test.parentProject=project1
+ bigquery.test.project=project2
+ bigquery.test.dataset=main_qa
+ bigquery.test.materializationDataset=main_qa
+ bigquery.test.credentialsFile=abc3.json
+ bigquery.test.viewsEnabled=true
+ ```
+
+ Example:
+ ```sql
+ -- step=2
+ -- source=bigquery
+ -- system=test
+ -- target=temp
+ -- tableName=`login_raw`
+ select * from project2.table1
+ ```
+
+### http & http_file
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ----------------------- |
+ | connectionName | none | No | Connection info |
+ | url | none | No | Request URL |
+ | httpMethod | GET | No | Request method |
+ | timeout | none | No | Timeout duration |
+ | requestBody | none | No | Request body |
+ | fieldName | value | No | Used when parsing the response |
+ | jsonPath | $ | No | Used when parsing the response |
+ | splitBy | empty string | No | Used when parsing the response |
+ | tempDestinationDir | /tmp | No | Local temp directory |
+ | hdfsDir | /tmp | No | Destination directory on HDFS |
+
+ Settings in application.properties:
+
+ ```properties
+ your_connection_name.http.header.Authorization=Basic 123456
+ your_connection_name.http.proxy.host=localhost
+ your_connection_name.http.proxy.port=8080
+ ```
+
+ Example:
+ ```sql
+ -- step=1
+ -- source=http
+ -- url=http://localhost:1080/get_workday?start=${START_TIME_TIMESTAMP}&end=${END_TIME_TIMESTAMP}
+ -- target=temp
+ -- tableName=source_data
+
+ -- step=2
+ -- source=temp
+ -- tableName=source_data
+ -- target=temp
+ -- tableName=source_data_workday
+ -- writeMode=append
+ with `workday_temp` as (select explode(from_json(value,
+        'struct<Report_Entry:array<struct<a:string,b:string,c:string,d:string,e:string,f:string,g:string,h:string,i:string,j:string,k:string,l:string,m:string,n:string,o:string,p:string,q:string>>>').Report_Entry)
+ as Report_Entry
+ from `source_data`)
+ select Report_Entry.`a` as `a`,
+ Report_Entry.`b` as `b`,
+ Report_Entry.`c` as `c`,
+ Report_Entry.`d` as `d`,
+ Report_Entry.`e` as `e`,
+ Report_Entry.`f` as `f`,
+ Report_Entry.`g` as `g`,
+ Report_Entry.`h` as `h`,
+ Report_Entry.`i` as `i`,
+ Report_Entry.`j` as `j`,
+ Report_Entry.`k` as `k`,
+ Report_Entry.`l` as `l`,
+ Report_Entry.`m` as `m`,
+ Report_Entry.`n` as `n`,
+ Report_Entry.`o` as `o`,
+ Report_Entry.`p` as `p`,
+ Report_Entry.`q` as `q`,
+ '${YEAR}' as `year`,
+ '${MONTH}' as `month`,
+ '${DAY}' as `day`,
+ '${HOUR}' as `hour`
+ from `workday_temp`;
+ ```
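+
+ The table above also lists `tempDestinationDir` and `hdfsDir`, which matter for `http_file`. A hypothetical sketch (the URL and paths are illustrative, assuming the response is staged in the local temp directory and then uploaded to hdfs):
+
+ ```sql
+ -- step=1
+ -- source=http_file
+ -- connectionName=your_connection_name
+ -- url=http://localhost:1080/download/report.csv
+ -- tempDestinationDir=/tmp/http
+ -- hdfsDir=hdfs:///data/http
+ -- target=do_nothing
+ ```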
+
+
+## Write
+
+Write targets fall into the following categories:
+
+1. Return a DataFrame and operate on it further.
+2. Register something on the SparkSession.
+
+The write types in detail:
+
+### temp
+
+ The step's result is registered as an in-memory temp table that lives for the current SparkSession; later steps whose read type is `hive` or `temp` can reference it directly in sql.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------------ |
+ | dataSourceType | none | No | Allowed value: temp. |
+ | tableName | none | No | Name under which the result is registered as a temp table. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=temp
+ -- target=temp
+ -- tableName=temp_table
+ select 1 as a;
+
+ -- step=2
+ -- source=temp
+ -- target=temp
+ select *
+ from temp_table;
+ ```
+
+### hive
+
+ The step's result is written to the target hive table, using dynamic partitioning by default.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------------------ |
+ | dataSourceType | none | No | Allowed value: hive. |
+ | tableName | none | No | hive table the result is written to. |
+ | writeMode | none | No | Allowed values: overwrite, append. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=temp
+ -- target=hive
+ -- tableName=temp_table
+ select 1 as a,
+ 2 as b;
+ ```
+
+### console output
+
+ The step's result is printed to the console, showing at most the first 10000 rows.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ----------------- |
+ | dataSourceType | none | No | Allowed value: console. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=temp
+ -- target=console
+ select 1 as a;
+ ```
+
+### variables
+
+ The step must produce a single-row DataFrame (multiple columns allowed); each column is set as a global variable that later steps can reference directly in their sql.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------- |
+ | dataSourceType | none | No | Allowed value: variables. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=temp
+ -- target=variables
+ select '1' as a,
+ '2' as b;
+
+ -- step=2
+ -- source=temp
+ -- target=hive
+ -- tableName=temp_table
+ select '${a}' as a,
+ '${b}' as b,
+ c
+ from test_table;
+ ```
+
+### udf
+
+ This step registers the given method of the class loaded during read as a udf.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------ |
+ | dataSourceType | none | No | Allowed value: udf. |
+ | methodName | none | No | Method to register as a udf |
+ | udfName | none | No | Name of the udf after registration |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=pmml
+ -- className=com.box.datapipeline.udf.PmmlUDF
+ -- pmmlFileName=test_rand_model.pmml
+ -- target=udf
+ -- methodName=predict
+ -- udfName=predict
+
+ -- step=2
+ -- source=hive
+ -- target=temp
+ -- tableName=temp1
+ select cast(1.0 as float) as x1,
+ cast(1.0 as float) as x2,
+ cast(1.0 as float) as x3;
+
+ -- step=3
+ -- source=temp
+ -- target=temp
+ -- tableName=temp2
+ select predict(
+ struct(
+ x1,
+ x2,
+ x3
+ )
+ ) as target,
+ x1,
+ x2,
+ x3
+ from temp1;
+ ```
+
+### do_nothing
+
+ This step only runs the Read part; the Write part does nothing.
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=hive
+ -- target=do_nothing
+ truncate table test_table;
+ ```
+
+### jdbc
+
+ jdbc writes share a common interface and identical usage. The following write modes are supported:
+
+ - append
+
+   Appends data to the target table; a primary-key conflict raises an error.
+
+ - upsert
+
+   Upserts into the target table by primary key.
+
+ - delete
+
+   Deletes from the target table by primary key.
+
+ - execute
+
+   Executes the step's sql directly. Suited to DDL-style operations such as truncate table, create table, create index. See the sketch after the example below.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------------------------------------------ |
+ | dataSourceType | none | No | Allowed values: mysql, oracle, postgres, ms_sql_server. |
+ | dbName | none | No | If dbName is specified, connection settings in application.properties starting with that dbName are used; if omitted, no prefix is used. |
+ | tableName | none | No | Target table name. |
+ | writeMode | none | No | Allowed values: append, upsert, delete, execute. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=temp
+ -- target=postgres
+ -- dbName=test
+ -- tableName=test_table
+ -- writeMode=upsert
+ select *
+ from temp;
+ ```
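+
+ For `execute` mode, a minimal sketch (assuming the same `test` postgres connection; the DDL statement is illustrative):
+
+ ```sql
+ -- step=1
+ -- source=temp
+ -- target=postgres
+ -- dbName=test
+ -- tableName=test_table
+ -- writeMode=execute
+ truncate table test_table;
+ ```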
+
+ - mysql
+ - oracle
+ - postgres
+ - ms_sql_server
+
+### ElasticSearch
+
+ es operations need basic connection settings in application.properties, in the form `es.*`, for example:
+
+ ```properties
+ es.nodes=localhost:9200,localhost:9201,localhost:9202
+ es.net.http.auth.user=elastic
+ es.net.http.auth.pass=123456
+ es.batch.write.refresh=false
+ es.index.auto.create=true
+ es.batch.size.entries=5000
+ es.batch.size.bytes=15mb
+ es.write.operation=index
+ es.spark.dataframe.write.null=true
+ es.nodes.wan.only=true
+ ```
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------------------------------------------ |
+ | dataSourceType | none | No | Allowed value: es. |
+ | tableName | none | No | Name of the es index to write to. |
+ | primaryKeys | none | Yes | Primary key(s) in es (auto-generated if unset); required in update mode, otherwise updates cannot work. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=hive
+ -- target=es
+ -- tableName=test_index
+ -- primaryKeys=id
+ select id,
+ name
+ from test_table;
+ ```
+
+### kudu
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------------- |
+ | dataSourceType | none | No | Allowed value: kudu. |
+ | tableName | none | No | kudu table to write to (dbName.tableName). |
+
+ ```sql
+ -- step=1
+ -- source=hive
+ -- target=kudu
+ -- tableName=test.test_table
+ -- writeMode=upsert
+ select *
+ from test_table;
+ ```
+
+### impala_kudu
+
+ impala_kudu works almost the same as kudu: both operate on data through kudu spark. The difference is that impala_kudu targets kudu tables managed by impala. Because impala prefixes table names at creation time, a table referenced as 'test_db.test_table' inside impala may be named `impala::test_db.test_table` when accessed through kudu directly.
+
+ Therefore an extra `kudu.table.prefix` setting is required in application.properties.
+
+ ```properties
+ kudu.master=localhost:7051
+ kudu.table.prefix=impala::
+ ```
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ---------------------- |
+ | dataSourceType | none | No | Allowed value: impala_kudu. |
+ | dbName | none | No | Database of the kudu table to write to. |
+ | tableName | none | No | Name of the kudu table to write to. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=hive
+ -- target=impala_kudu
+ -- dbName=test
+ -- tableName=test_table
+ -- writeMode=upsert
+ select *
+ from test_table;
+ ```
+
+### File transfer
+
+ These operations take files as input (ftp, scp, hdfs) and produce files as output; the content is never parsed, only transferred.
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | ------------------------ |
+ | dataSourceType | none | No | Allowed values: scp, ftp, hdfs. |
+ | filePath | none | No | Output file path. |
+
+ - scp
+
+ | Parameter | Default | Nullable | Description |
+ | ------------ | ------ | -------- | ------------------------------ |
+ | configPrefix | none | No | Connection config prefix of the target system. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=hdfs
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test.+)()
+ -- target=scp
+ -- configPrefix=test
+ -- filePath=/test
+ -- writeMode=overwrite
+ ```
+
+ - ftp
+
+ | Parameter | Default | Nullable | Description |
+ | ------------ | ------ | -------- | ------------------------------ |
+ | configPrefix | none | No | Connection config prefix of the target system. |
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=hdfs
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test.+)()
+ -- target=ftp
+ -- configPrefix=test
+ -- filePath=/test
+ -- writeMode=overwrite
+ ```
+
+ - hdfs
+
+ Example:
+
+ ```sql
+ -- step=1
+ -- source=ftp
+ -- configPrefix=test
+ -- fileNamePattern=()(a\.test\..+)(OK)
+ -- deleteSource=false
+ -- target=hdfs
+ -- configPrefix=test
+ -- writeMode=overwrite
+ ```
+
+### hdfs 文件
+
+ These steps take structured data (tables of various kinds) as input; the write processes the source data according to the given rules and produces the final file output.
+
+ The hdfs file steps share a set of common base parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | -------------- | ------ | -------- | -------------------------------------------------- |
+ | dataSourceType | none | No | Allowed values: hdfs, csv. |
+ | filePath | none | No | Output file path. |
+ | writeMode | none | No | Allowed values: overwrite, append. overwrite is used in most cases. |
+
+ - hdfs
+
+ Parameters:
+
+ | Parameter | Default | Nullable | Description |
+ | ----------------- | ------------ | -------- | ------------------------------------------------------------ |
+ | encoding | utf-8 | No | Output file encoding; defaults to utf-8. |
+ | codecExtension | empty string | Yes | Compression suffix (e.g. gz, zip) when the output must be compressed before uploading to hdfs. Empty by default (no compression). |
+ | separator | none | Yes | Delimiter used when joining fields into delimiter-separated text. Configure only one of separator and fieldLengthConfig. |
+ | fieldLengthConfig | none | Yes | Per-field lengths (in bytes) for fixed-width output. Configure only one of separator and fieldLengthConfig. |
+
+ Example:
+
+ ```sql
+ -- step=3
+ -- source=hive
+ -- target=hdfs
+ -- configPrefix=test
+ -- filePath=/test.txt
+ -- encoding=gbk
+ -- fieldLengthConfig=18,4
+ -- writeMode=overwrite
+ select a,
+ b
+ from test_table;
+ ```
+
+ - csv
+
+ Example:
+
+ ```sql
+ -- step=3
+ -- source=hive
+ -- target=csv
+ -- filePath=/test.csv
+ -- writeMode=overwrite
+ select a,
+ b
+ from test_table;
+ ```
+
+
diff --git a/website/docs/developer-setup.md b/website/docs/developer-setup.md
new file mode 100644
index 0000000..fc9cc16
--- /dev/null
+++ b/website/docs/developer-setup.md
@@ -0,0 +1,73 @@
+---
+title: Developer Setup
+sidebar_position: 4
+keywords: [ide, developer, setup]
+toc: true
+last_modified_at: 2019-12-30T15:59:57-04:00
+---
+
+## Pre-requisites
+
+To contribute code, you need
+
+ - a GitHub account
+ - a Linux (or) macOS development environment with Java JDK 8 installed
+ - [Docker](https://www.docker.com/) installed for running the demo, integration tests, or building the website
+ - Docker Compose
+
+
+## IDE Setup
+
+To contribute, you will need:
+
+- IntelliJ IDEA
+- the Scala plugin
+
+### Contributing Code
+
+ - Once you finalize on a project/task, please open a new GitHub issue or assign an existing one to yourself.
+ - Almost all PRs should be linked to a GitHub issue. It's always good to have a GitHub issue upfront to avoid duplicating efforts.
+ - But you may be asked to file a GitHub issue if the reviewer deems it necessary
+ - Before you begin work,
+ - Claim the GitHub issue using the process above and assign the GitHub issue to yourself.
+ - Click "Start Progress" on the GitHub issue, which tells everyone that you are working on the issue actively.
+ - Make your code change
+ - Get existing tests to pass using `./gradlew test`
+ - Add adequate tests for your new functionality
+ - For involved changes, it's best to test the changes in real production environments and report the results in the PR.
+ - For website changes, please build the site locally & test navigation, formatting & links thoroughly
+ - If your code change changes some aspect of documentation (e.g new config, default value change),
+ please ensure there is another PR to [update the docs](https://github.com/SharpData/SharpETL/tree/pages/README.md) as well.
+ - Sending a Pull Request
+ - Please ensure your commit message body is descriptive of the change. Bulleted summary would be appreciated.
+ - Address code review comments & keep pushing changes to your fork/branch, which automatically updates the PR
+ - Before your change can be merged, it should be squashed into a single commit for cleaner commit history.
+ - Finally, once your pull request is merged, make sure to close the issue.
+
+### Coding guidelines
+
+Our code can benefit from contributors speaking the same "language" when authoring code. After all, it gets read a lot more than it gets
+written, so optimizing for "reads" is a good goal. The list below is a set of guidelines that contributors strive to uphold, and it reflects
+how we want to evolve our code in the future.
+
+## Code & Project Structure
+
+ * `common` : most common case classes
+ * `core` : core modules, like `WorkflowInterpreter`, `WorkflowParser` etc.
+ * `datasource` : Generic datasource API
+ * `docker` : Docker containers used by demo and integration tests. Brings up a mini data ecosystem locally
+ * `flink` : Implementation for the Flink engine
+ * `spark` : Implementation for the Spark engine
+
+## Code WalkThrough
+
+[This quick start](/docs/quick-start-guide) will get you started.
+
+## Docker Setup
+
+We encourage you to test your code on Docker; please follow the [docker setup](/docs/docker-setup) guide.
+
+## Website
+
+[Sharp ETL site](/) is hosted on a special `pages` branch. Please follow the `README` file under `pages` on that branch for
+instructions on making changes to the website.
diff --git a/website/docs/docker-setup.md b/website/docs/docker-setup.md
new file mode 100644
index 0000000..596386d
--- /dev/null
+++ b/website/docs/docker-setup.md
@@ -0,0 +1,61 @@
+---
+title: "Docker setup"
+sidebar_position: 5
+toc: true
+last_modified_at: 2021-11-03T18:25:57-04:00
+---
+
+This guide provides a quick docker setup for local testing
+
+## Requirements
+
+- Docker
+- Docker Compose
+
+## Setup step by step
+
+```bash
+cd docker
+docker compose up -d # to start ETL database(mysql 5.7.28) & hive instance(version 3.1.2)
+```
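+
+To check that the containers came up, a quick sanity check:
+
+```bash
+docker compose ps  # the mysql and hive containers should be listed as running
+```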
+
+To access the local hive instance you need the following.
+
+in `spark/build.gradle`
+
+```diff
++ implementation "org.apache.spark:spark-hive_$scalaVersion:$sparkVersion"
+```
+
+add `hive-site.xml` at `spark/src/main/resources/hive-site.xml`
+
+```xml
+<configuration>
+  <property>
+    <name>hive.metastore.uris</name>
+    <value>thrift://localhost:9083</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>file:///Users/$(whoami)/Documents/warehouse</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.external.dir</name>
+    <value>file:///Users/$(whoami)/Documents/warehouse</value>
+  </property>
+</configuration>
+```
+
+add `core-site.xml` at `spark/src/main/resources/core-site.xml`
+
+```xml
+<configuration>
+  <property>
+    <name>fs.defaultFS</name>
+    <value>file:///Users/$(whoami)/Documents/warehouse</value>
+    <final>true</final>
+  </property>
+</configuration>
+```
diff --git a/website/docs/dwd-config-template.md b/website/docs/dwd-config-template.md
new file mode 100644
index 0000000..91b0496
--- /dev/null
+++ b/website/docs/dwd-config-template.md
@@ -0,0 +1,89 @@
+---
+title: "DWD config template"
+sidebar_position: 3
+toc: true
+last_modified_at: 2022-11-25T14:26:32-04:00
+---
+
+This document describes the parameters of the DWD config template and how to use them.
+
+For an example template, see the quick start [config file](https://docs.google.com/spreadsheets/d/1CetkqBsXj_E8oZBsws9iGdaJB1QJUajnwqH4FoKhXKA/edit#gid=1485376124).
+
+## Datasource config: dwd_etl_config
+
+`source_connection`: the connection configured in application.properties
+
+`source_type`: source database type, e.g. hive
+
+`source_db`: database to read data from
+
+`source_table`: table to read data from
+
+`target_connection`: target connection configured in application.properties, e.g. hive
+
+`target_type`: target database type, e.g. hive
+
+`target_db`: dwd database name
+
+`target_table`: dwd table name
+
+`fact_or_dim`: marks the table as a dimension or fact table. When set to dim (dimension) and `slow_changing` is unset, `slow_changing` defaults to TRUE; when set to fact and `slow_changing` is unset, it defaults to FALSE. [enum: fact/dim]
+
+`slow_changing`: whether the table is a slowly changing one. [enum: TRUE/FALSE]
+
+`row_filter_expression`: nullable: yes. E.g. location = 'shanghai' keeps only rows for Shanghai. The expression is appended as a where clause to the sql that queries the source table, as sketched below
+
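+For illustration, with `row_filter_expression` set to `location = 'shanghai'`, the generated extract query would roughly take this shape (the table name is illustrative):
+
+```sql
+select *
+from sales.order
+where location = 'shanghai';
+```
+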
+`load_type`: incremental vs. full load. [enum: incremental/full]
+
+`log_driven_type`: log-driven type. [enum: timewindow/upstream/kafka_offset/auto_inc_id/diff] timewindow drives off a time window; upstream off the upstream ETL dependency; kafka_offset off the message-queue offset; auto_inc_id off an auto-increment id (which must uniquely identify a row); diff off file comparison
+
+`upstream`: configuration for the upstream log-driven type, naming the ETL job this one depends on
+
+`depends_on`: scheduler dependency.
+
+`default_start`: start time in timewindow mode
+
+
+## Table config: dwd_config
+
+`source_table`: table to read data from
+
+`target_table`: target table of this ETL
+
+`source_column`: column name in the source table
+
+`source_column_description`: column description
+
+`target_column`: column to keep in the target table
+
+`extra_column_expression`: expression for columns that exist in `target_column` but not in `source_column`. The special value **zip_id_flag** marks the `target_column` as the master-data zip_id obtained by joining the master data
+
+`partition_column`: marks partition columns. Marked with TRUE
+
+`logic_primary_column`: marks the logical primary key. Marked with TRUE
+
+`join_db_connection`: connection (configured in application.properties) of the database holding the dimension table to join
+
+`join_db_type`: type of the database holding the dimension table to join
+
+`join_db`: name of the database holding the dimension table to join
+
+`join_table`: name of the dimension table to join
+
+`join_on`: column of the dimension table to join on
+
+`create_dim_mode`: whether dimension values from the current table should be inserted into the dimension table. [enum: never/once/always], explained below.
+
+* never: when a dimension value cannot be matched, the column in the current table is marked as unmatched, -1 by default
+
+* once: when a dimension value cannot be matched, the missing dimension is inserted into the dimension table from the current table; the usual treatment for late-arriving dimensions
+
+* always: dimension values are always inserted into the dimension table from the current table, i.e. the current table acts as one of the dimension table's sources
+
+`join_table_column`: columns of the joined dimension table, usually used when `create_dim_mode` is once or always, i.e. which columns of the current table are inserted into the joined dimension table
+
+`business_create_time`: the business creation time of the current table
+
+`business_update_time`: the business update time of the current table
+
+`ignore_changing_column`: columns of the current table that should not act as slowly-changing columns of the zipper (SCD) table. Marked with TRUE. This column marks the zipper table
\ No newline at end of file
diff --git a/website/docs/encrypt-in-properties-file.md b/website/docs/encrypt-in-properties-file.md
new file mode 100644
index 0000000..bcf29b5
--- /dev/null
+++ b/website/docs/encrypt-in-properties-file.md
@@ -0,0 +1,35 @@
+---
+title: "Encrypt your confidential information in properties file"
+sidebar_position: 8
+toc: true
+last_modified_at: 2021-12-23T18:25:57-04:00
+---
+
+## generate your private key `etl.key`
+
+1. Open the test `ETLConfigSpec`.
+2. Replace the path with where you want to save the key.
+3. Replace the content, which is the encryption password key.
+4. If you want to use a different offset, replace the default of 10 with the number you want.
+5. Run the test.
+
+## Set it up in your env
+
+1. Upload the key file to an hdfs path, e.g. `hdfs:///etl/conf/etl.key`
+2. Update the properties file in hdfs, e.g. `encrypt.keyPath=hdfs:///etl/conf/etl.key`
+3. If you set an offset other than 10, also update the properties file in hdfs, e.g. `encrypt.offset=11xx`
+
+## Using the encrypt command
+
+1. prepare `application.properties`:
+
+```properties
+encrypt.algorithm=PBEWithMD5AndDES
+encrypt.keyPath=/path/to/etl.key
+```
+
+2. Run the encrypt command:
+
+```bash
+./gradlew :spark:run --args="encrypt -p file:///path/to/application.properties 'content_to_be_encrypted'"
+```
diff --git a/website/docs/end-to-end-showcase-postgres.md b/website/docs/end-to-end-showcase-postgres.md
new file mode 100644
index 0000000..7d63e92
--- /dev/null
+++ b/website/docs/end-to-end-showcase-postgres.md
@@ -0,0 +1,435 @@
+---
+title: "End to end showcase(Postgres)"
+sidebar_position: 2
+toc: true
+last_modified_at: 2022-04-09T10:59:57+8:00
+---
+
+
+## Environment setup
+
+* [Docker setup](docker-setup)
+
+```bash
+docker run --name postgres -e POSTGRES_PASSWORD=postgres -d -p 5432:5432 postgres:12.0-alpine
+```
+
+## Running the jobs
+
+### Prepare the source tables
+
+postgres:
+
+```sql
+create schema if not exists sales;
+
+drop table if exists sales.order;
+create table if not exists sales.order
+(
+ order_sn varchar(128),
+ product_code varchar(128),
+ product_name varchar(128),
+ product_version varchar(128),
+ product_status varchar(128),
+ user_code varchar(128),
+ user_name varchar(128),
+ user_age int,
+ user_address varchar(128),
+ product_count int,
+ price decimal(10, 4),
+ discount decimal(10, 4),
+ order_status varchar(128),
+ order_create_time timestamp,
+ order_update_time timestamp
+);
+
+drop table if exists sales.user;
+create table if not exists sales.user
+(
+ user_code varchar(128),
+ user_name varchar(128),
+ user_age int,
+ user_address varchar(128),
+ create_time timestamp,
+ update_time timestamp
+);
+
+drop table if exists sales.product;
+create table if not exists sales.product
+(
+ mid varchar(128),
+ name varchar(128),
+ version varchar(128),
+ status varchar(128),
+ create_time timestamp,
+ update_time timestamp
+);
+```
+
+
+Postgres ods:
+
+```sql
+create schema if not exists ods;
+
+drop table if exists ods.t_order;
+create table if not exists ods.t_order
+(
+ order_sn varchar(128),
+ product_code varchar(128),
+ product_name varchar(128),
+ product_version varchar(128),
+ product_status varchar(128),
+ user_code varchar(128),
+ user_name varchar(128),
+ user_age int,
+ user_address varchar(128),
+ product_count int,
+ price decimal(10, 4),
+ discount decimal(10, 4),
+ order_status varchar(128),
+ order_create_time timestamp,
+ order_update_time timestamp,
+ job_id varchar(128)
+) ;
+
+drop table if exists ods.t_user;
+create table if not exists ods.t_user
+(
+ user_code varchar(128),
+ user_name varchar(128),
+ user_age int,
+ user_address varchar(128),
+ create_time timestamp,
+ update_time timestamp,
+ job_id varchar(128)
+);
+
+drop table if exists ods.t_product;
+create table if not exists ods.t_product
+(
+ product_code varchar(128),
+ product_name varchar(128),
+ product_version varchar(128),
+ product_status varchar(128),
+ create_time timestamp,
+ update_time timestamp,
+ job_id varchar(128)
+);
+```
+
+Postgres dwd:
+
+```sql
+SET search_path TO dwd, public;
+create extension if not exists "uuid-ossp";
+create schema if not exists dwd;
+drop table if exists dwd.t_fact_order;
+create table dwd.t_fact_order
+(
+ order_sn varchar(128),
+ product_id varchar(128),
+ user_id varchar(128),
+ product_count int,
+ price decimal(10, 4),
+ discount decimal(10, 4),
+ order_status varchar(128),
+ order_create_time timestamp,
+ order_update_time timestamp,
+ actual decimal(10, 4),
+ job_id varchar(128)
+);
+
+drop table if exists dwd.t_dim_product;
+create table if not exists dwd.t_dim_product
+(
+ product_id varchar(128) default uuid_generate_v1(),
+ mid varchar(128),
+ name varchar(128),
+ version varchar(128),
+ status varchar(128),
+ create_time timestamp,
+ update_time timestamp,
+ job_id varchar(128),
+ start_time timestamp,
+ end_time timestamp,
+ is_latest varchar(1),
+ is_active varchar(1),
+ is_auto_created varchar(1)
+);
+
+drop table if exists dwd.t_dim_user;
+create table if not exists dwd.t_dim_user
+(
+ dim_user_id varchar(128) default uuid_generate_v1(),
+ user_info_code varchar(128),
+ user_name varchar(128),
+ user_age int,
+ user_address varchar(128),
+ create_time timestamp,
+ update_time timestamp,
+ job_id varchar(128),
+ start_time timestamp,
+ end_time timestamp,
+ is_latest varchar(1),
+ is_active varchar(1),
+ is_auto_created varchar(1)
+);
+```
+
+Postgres report:
+
+```sql
+-- == report layer: actual sales of Huawei mate40-v2
+create schema if not exists report;
+drop table if exists report.t_fact_order_report_actual;
+create table report.t_fact_order_report_actual(
+ order_sn varchar(128),
+ product_id varchar(128),
+ product_code varchar(128),
+ product_name varchar(128),
+ product_version varchar(128),
+ product_status varchar(128),
+ price decimal(10,4),
+ discount decimal(10,4),
+ order_status varchar(128),
+ order_create_time timestamp,
+ order_update_time timestamp,
+ actual decimal(10,4)
+);
+
+-- == report layer: Huawei mate40-v2 sales including v1
+create schema if not exists report;
+drop table if exists report.t_fact_order_report_latest;
+create table report.t_fact_order_report_latest(
+ order_sn varchar(128),
+ product_id varchar(128),
+ product_code varchar(128),
+ product_name varchar(128),
+ product_version varchar(128),
+ product_status varchar(128),
+ price decimal(10,4),
+ discount decimal(10,4),
+ order_status varchar(128),
+ order_create_time timestamp,
+ order_update_time timestamp,
+ actual decimal(10,4)
+);
+```
+
+
+
+### Run the source-to-ods jobs
+
+0. Prepare the data
+
+Insert the pre-seeded data into dwd
+
+```sql
+truncate table dwd.t_fact_order;
+insert into dwd.t_fact_order (order_sn, product_id,
+ user_id, product_count, price,
+ discount, order_status,
+ order_create_time,
+ order_update_time, actual,
+ job_id)
+values ('AAA', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 20, 0.3, 1,
+ '2022-04-04 10:00:00', '2022-04-04 10:00:00', 19.7, 1),
+ ('BBB', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 10, 0.3, 2,
+ '2022-04-04 11:00:00', '2022-04-08 11:00:00', 9.7, 1),
+ ('CCC', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 30, 0.3, 1,
+ '2022-04-04 12:00:00', '2022-04-04 12:00:00', 29.7, 1);
+
+truncate table dwd.t_dim_user;
+insert into dwd.t_dim_user (dim_user_id, user_info_code, user_name,
+ user_age, user_address, create_time,
+ update_time,
+ start_time, end_time, is_active,
+ is_latest, is_auto_created)
+values ('06347be1-f752-4228-8480-4528a2166e14', 'u1', '张三', 12, '胜利街道', '2020-01-01 10:00:00', '2020-01-01 10:00:00',
+ '2020-01-01 10:00:00', null, '1', '1', '0');
+
+
+truncate table dwd.t_dim_product;
+insert into dwd.t_dim_product (product_id, mid, name, version,
+ status, create_time, update_time,
+ start_time, end_time,
+ is_active,
+ is_latest, is_auto_created)
+values ('3abd0495-9abe-44a0-b95b-0e42aeadc807', 'p1', '华为', 'mate40', '上架', '2022-01-01 10:00:00',
+ '2022-01-01 10:00:00', '2022-01-01 10:00:00', '2022-04-04 13:00:00', '0', '0', '0')
+ , ('a9cd4e31-9268-4ee9-94b4-18c5e839a937', 'p1', '华为', 'mate40-v2', '上架', '2022-01-01 10:00:00',
+ '2022-04-04 13:00:00', '2022-04-04 13:00:00', null, '1', '1', '0');
+
+```
+
+Prepare the source table data
+
+```sql
+truncate table sales.order;
+insert into sales.order (order_sn, product_code, product_name, product_version, product_status,
+ user_code, user_name, user_age, user_address, product_count, price, discount, order_status,
+ order_create_time, order_update_time)
+values ('AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '胜利街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00',
+        '2022-04-08 10:00:00') -- normal update
+ , ('AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '胜利街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00',
+        '2022-04-08 10:00:00') -- duplicate row
+ , ('BBB', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '胜利街道', 12, 10, 0.3, 1, '2022-04-04 10:00:00',
+        '2022-04-08 10:00:00') -- out-of-order timestamps, left unchanged
+ , ('DDD', 'p1', '华为', 'mate40-v2', '上架', 'u2', '李四', 32, '迎宾街道', 15, 200, 0.4, 1, '2022-04-08 09:00:00',
+        '2022-04-08 10:00:00') -- new row; Huawei p1 renamed
+ , ('DDD', 'p2', '华为', 'mate50', '上架', 'u3', '李四', 32, '迎宾街道', 15, 330, 0.4, 1, '2022-04-08 09:00:00',
+        '2022-04-08 10:00:00'); -- new row; late-arriving dimension p2, deliberately not handled
+```
+
+
+1. Download the prepared [source->ods template](https://docs.google.com/spreadsheets/d/1vvWq26t7i_9bFXaRMQpQsFKXnBBHkIEzVEI3EI_eLIg/edit#gid=0) to `~/Desktop`, ready to generate the job scripts
+
+2. Generate the job scripts with this command
+
+```bash
+./gradlew :spark:run --args="generate-ods-sql -f ~/Desktop/postgres-ods.xlsx --output ~/Downloads/SharpETL/spark/src/main/resources/tasks/"
+```
+
+3. Logs like the following indicate the job scripts have been generated
+
+```log
+2022/08/03 10:54:49 INFO [ETLLogger] - Write sql file to /Users/xiaoqiangma/Downloads/SharpETL/spark/src/main/resources/tasks/ods__ods.t_order.sql
+2022/08/03 10:54:49 INFO [ETLLogger] - Write sql file to /Users/xiaoqiangma/Downloads/SharpETL/spark/src/main/resources/tasks/ods__ods.t_user.sql
+2022/08/03 10:54:49 INFO [ETLLogger] - Write sql file to /Users/xiaoqiangma/Downloads/SharpETL/spark/src/main/resources/tasks/ods__ods.t_product.sql
+```
+
+4. Create the ods tables (using the DDL from the "Postgres ods" section above)
+
+
+5. Prepare the connection settings
+
+```properties
+sales.postgres.url=jdbc:postgresql://localhost:5432/postgres?stringtype=unspecified
+sales.postgres.user=postgres
+sales.postgres.password=postgres
+sales.postgres.driver=org.postgresql.Driver
+sales.postgres.fetchsize=10
+```
+
+6. Start the job with this command
+
+```bash
+./gradlew :spark:run --args="single-job --name=ods__ods.t_order --period=1440 --default-start-time='2022-04-08 00:00:00' --local --once"
+```
+
+
+
+### Run the ods-to-dwd job
+
+1. Download the prepared [ods->dwd template](https://docs.google.com/spreadsheets/d/19pOIogg31JWRUiKYyFLnQXCnPihYFOitamlfzrIXWfE/edit#gid=0) to your desktop
+
+2. Generate the job scripts with this command
+
+```bash
+./gradlew :spark:run --args="generate-dwd-sql -f ~/Desktop/postgres-dwd.xlsx --output ~/Downloads/SharpETL/spark/src/main/resources/tasks/"
+```
+
+3. Logs like the following indicate the job scripts have been generated
+
+```log
+2022/08/03 10:58:10 INFO [ETLLogger] - Write sql file to /Users/xiaoqiangma/Downloads/SharpETL/spark/src/main/resources/tasks/ods.t_order_dwd.t_fact_order.sql
+```
+
+4. Create the dwd/dim tables (using the DDL from the "Postgres dwd" section above)
+
+5. Start the job with this command
+
+```bash
+./gradlew :spark:run --args="single-job --name=ods.t_order_dwd.t_fact_order --period=1440 --local"
+```
+
+
+
+### Run the dwd-to-report jobs
+
+1. Manually create two steps, one per report requirement:
+   1. report layer: actual sales of Huawei mate40-v2. Put it under `~/Downloads/SharpETL/spark/src/main/resources/tasks/` and name it `order_report_actual.sql`
+
+ ```sql
+ -- workflow=order_report_actual
+ -- period=1440
+ -- loadType=incremental
+ -- logDrivenType=timewindow
+
+ -- step=1
+ -- sourceConfig
+ -- dataSourceType=postgres
+ -- dbName=postgres
+ -- tableName=dwd.t_fact_order
+ -- targetConfig
+ -- dataSourceType=postgres
+ -- dbName=postgres
+ -- tableName=report.t_fact_order_report_actual
+ -- writeMode=overwrite
+ -- incrementalType=depend_on_upstream
+ select
+ fact.order_sn order_sn,
+ dim.product_id product_id,
+ dim.mid product_code,
+ dim.name product_name,
+ dim.version product_version,
+ dim.status product_status,
+ fact.price price,
+ fact.discount discount,
+ fact.order_status order_status,
+ fact.order_create_time order_create_time,
+ fact.order_update_time order_update_time,
+ fact.actual actual
+ from dwd.t_fact_order fact
+ inner join dwd.t_dim_product dim
+ on fact.product_id = dim.product_id;
+ ```
+
+   2. report layer: Huawei mate40-v2 sales including v1. Put it under `~/Downloads/SharpETL/spark/src/main/resources/tasks/` and name it `order_report_latest.sql`
+
+ ```sql
+ -- workflow=order_report_latest
+ -- period=1440
+ -- loadType=incremental
+ -- logDrivenType=timewindow
+
+ -- step=1
+ -- sourceConfig
+ -- dataSourceType=postgres
+ -- dbName=postgres
+ -- tableName=dwd.t_fact_order
+ -- targetConfig
+ -- dataSourceType=postgres
+ -- dbName=postgres
+ -- tableName=report.t_fact_order_report_latest
+ -- writeMode=overwrite
+ -- incrementalType=depend_on_upstream
+ select
+ fact.order_sn order_sn,
+ dim2.product_id product_id,
+ dim2.mid product_code,
+ dim2.name product_name,
+ dim2.version product_version,
+ dim2.status product_status,
+ fact.price price,
+ fact.discount discount,
+ fact.order_status order_status,
+ fact.order_create_time order_create_time,
+ fact.order_update_time order_update_time,
+ fact.actual actual
+ from dwd.t_fact_order fact
+ inner join dwd.t_dim_product dim on fact.product_id = dim.product_id
+ inner join (select * from dwd.t_dim_product dim_latest where is_latest='1') dim2 on dim.mid = dim2.mid;
+ ```
+
+2. Start the jobs with these commands
+
+```bash
+./gradlew :spark:run --args="single-job --name=order_report_actual --period=1440 --local"
+```
+```bash
+./gradlew :spark:run --args="single-job --name=order_report_latest --period=1440 --local"
+```
+
+
diff --git a/website/docs/end-to-end-showcase.md b/website/docs/end-to-end-showcase.md
new file mode 100644
index 0000000..15c52c9
--- /dev/null
+++ b/website/docs/end-to-end-showcase.md
@@ -0,0 +1,421 @@
+---
+title: "End to end showcase(Hive)"
+sidebar_position: 2
+toc: true
+last_modified_at: 2022-04-09T10:59:57+8:00
+---
+
+
+## Environment setup
+
+* [Docker setup](docker-setup)
+
+```bash
+docker run --name postgres -e POSTGRES_PASSWORD=postgres -d -p 5432:5432 postgres:12.0-alpine
+```
+
+## Running the jobs
+
+### Prepare the source tables
+
+postgres:
+
+```sql
+create schema if not exists sales;
+
+create table if not exists sales.order
+(
+ order_sn varchar(128),
+ product_code varchar(128),
+ product_name varchar(128),
+ product_version varchar(128),
+ product_status varchar(128),
+ user_code varchar(128),
+ user_name varchar(128),
+ user_age int,
+ user_address varchar(128),
+ product_count int,
+ price decimal(10, 4),
+ discount decimal(10, 4),
+ order_status varchar(128),
+ order_create_time timestamp,
+ order_update_time timestamp
+);
+
+create table if not exists sales.user
+(
+ user_code varchar(128),
+ user_name varchar(128),
+ user_age int,
+ user_address varchar(128),
+ create_time timestamp,
+ update_time timestamp
+);
+
+create table if not exists sales.product
+(
+ mid varchar(128),
+ name varchar(128),
+ version varchar(128),
+ status varchar(128),
+ create_time timestamp,
+ update_time timestamp
+);
+```
+
+
+hive ods:
+
+```sql
+create database if not exists ods;
+
+create table if not exists ods.t_order
+(
+ order_sn string,
+ product_code string,
+ product_name string,
+ product_version string,
+ product_status string,
+ user_code string,
+ user_name string,
+ user_age int,
+ user_address string,
+ product_count int,
+ price decimal(10, 4),
+ discount decimal(10, 4),
+ order_status string,
+ order_create_time timestamp,
+ order_update_time timestamp,
+ job_id string
+) partitioned by (`year` string, `month` string, `day` string);
+
+create table if not exists ods.t_user
+(
+ user_code string,
+ user_name string,
+ user_age int,
+ user_address string,
+ create_time timestamp,
+ update_time timestamp,
+ job_id string
+) partitioned by (`year` string, `month` string, `day` string);
+
+create table if not exists ods.t_product
+(
+ product_code string,
+ product_name string,
+ product_version string,
+ product_status string,
+ create_time timestamp,
+ update_time timestamp,
+ job_id string
+) partitioned by (`year` string, `month` string, `day` string);
+```
+
+hive dwd:
+
+```sql
+create database if not exists dwd;
+create database if not exists dim;
+create table dwd.t_fact_order
+(
+ order_id string,
+ order_sn string,
+ product_id string,
+ user_id string,
+ product_count int,
+ price decimal(10, 4),
+ discount decimal(10, 4),
+ order_status string,
+ order_create_time timestamp,
+ order_update_time timestamp,
+ actual decimal(10, 4),
+ job_id string,
+ start_time timestamp,
+ end_time timestamp,
+ is_latest string,
+ is_active string
+) partitioned by (`year` string, `month` string, `day` string);
+
+create table if not exists dim.t_dim_product
+(
+ product_id string,
+ mid string,
+ name string,
+ version string,
+ status string,
+ create_time timestamp,
+ update_time timestamp,
+ job_id string,
+ start_time timestamp,
+ end_time timestamp,
+ is_latest string,
+ is_active string,
+ is_auto_created string
+) partitioned by (`year` string, `month` string, `day` string);
+
+
+create table if not exists dim.t_dim_user
+(
+ dim_user_id string,
+ user_info_code string,
+ user_name string,
+ user_age int,
+ user_address string,
+ create_time timestamp,
+ update_time timestamp,
+ job_id string,
+ start_time timestamp,
+ end_time timestamp,
+ is_latest string,
+ is_active string,
+ is_auto_created string
+) partitioned by (`year` string, `month` string, `day` string);
+```
+
+hive report:
+
+```sql
+-- == report layer: actual sales of Huawei mate40-v2
+create database if not exists report;
+create table if not exists report.t_fact_order_report_actual(
+ order_sn varchar(128),
+ product_id varchar(128),
+ product_code varchar(128),
+ product_name varchar(128),
+ product_version varchar(128),
+ product_status varchar(128),
+ price decimal(10,4),
+ discount decimal(10,4),
+ order_status varchar(128),
+ order_create_time timestamp,
+ order_update_time timestamp,
+ actual decimal(10,4)
+);
+
+-- == report layer: Huawei mate40-v2 sales including v1
+create table if not exists report.t_fact_order_report_latest(
+ order_sn varchar(128),
+ product_id varchar(128),
+ product_code varchar(128),
+ product_name varchar(128),
+ product_version varchar(128),
+ product_status varchar(128),
+ price decimal(10,4),
+ discount decimal(10,4),
+ order_status varchar(128),
+ order_create_time timestamp,
+ order_update_time timestamp,
+ actual decimal(10,4)
+);
+```
+
+
+### Run the source-to-ods jobs
+
+1. Prepare the data
+
+Insert the pre-seeded data into dwd
+
+```sql
+set hive.exec.dynamic.partition=true;
+truncate table dwd.t_fact_order;
+insert into dwd.t_fact_order partition (`year` = '2022', `month` = '04', `day` = '04') (order_id, order_sn, product_id,
+ user_id, product_count, price,
+ discount, order_status,
+ order_create_time,
+ order_update_time, actual,
+ start_time, end_time, is_active,
+ is_latest,
+ job_id)
+values ('aaa', 'AAA', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 20, 0.3, 1,
+ '2022-04-04 10:00:00', '2022-04-04 10:00:00', 19.7, '2022-04-04 10:00:00', null, '1', '1', 1),
+ ('bbb', 'BBB', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 10, 0.3, 2,
+ '2022-04-04 11:00:00', '2022-04-04 11:00:00', 9.7, '2022-04-04 11:00:00', null, '1', '1', 1),
+ ('ccc', 'CCC', '3abd0495-9abe-44a0-b95b-0e42aeadc807', '06347be1-f752-4228-8480-4528a2166e14', 12, 30, 0.3, 1,
+ '2022-04-04 12:00:00', '2022-04-04 12:00:00', 29.7, '2022-04-04 12:00:00', null, '1', '1', 1);
+
+truncate table dim.t_dim_user;
+insert into dim.t_dim_user partition (year = '2022', month = '04', day = '04') (dim_user_id, user_info_code, user_name,
+ user_age, user_address, create_time,
+ update_time,
+ start_time, end_time, is_active,
+ is_latest, is_auto_created)
+values ('06347be1-f752-4228-8480-4528a2166e14', 'u1', '张三', 12, '胜利街道', '2020-01-01 10:00:00', '2020-01-01 10:00:00',
+ '2020-01-01 10:00:00', null, '1', '1', '0');
+
+truncate table dim.t_dim_product;
+
+insert into dim.t_dim_product partition (year = '2022', month = '04', day = '04') (product_id, mid, name, version,
+ status, create_time, update_time,
+ start_time, end_time,
+ is_active,
+ is_latest, is_auto_created)
+values ('3abd0495-9abe-44a0-b95b-0e42aeadc807', 'p1', '华为', 'mate40', '上架', '2022-01-01 10:00:00',
+ '2022-01-01 10:00:00', '2022-01-01 10:00:00', '2022-04-04 13:00:00', '0', '0', '0')
+ , ('a9cd4e31-9268-4ee9-94b4-18c5e839a937', 'p1', '华为', 'mate40-v2', '上架', '2022-01-01 10:00:00',
+ '2022-04-04 13:00:00', '2022-04-04 13:00:00', null, '1', '1', '0');
+
+```
+
+Prepare the source table data
+
+```sql
+truncate table sales.order;
+insert into sales.order (order_sn, product_code, product_name, product_version, product_status,
+ user_code, user_name, user_age, user_address, product_count, price, discount, order_status,
+ order_create_time, order_update_time)
+values ('AAA', 'p1', '华为', 'mate40', '上架', 'u1', '张三', 12, '胜利街道', 12, 20, 0.3, 2, '2022-04-04 10:00:00',
+        '2022-04-08 10:00:00') -- normal update
+ , ('DDD', 'p1', '华为', 'mate40-v2', '上架', 'u2', '李四', 32, '迎宾街道', 15, 200, 0.4, 2, '2022-04-08 09:00:00',
+        '2022-04-08 10:00:00'); -- new row
+```
+
+
+2. Download the prepared [excel template](https://docs.google.com/spreadsheets/d/1Zn_Q-QUTf6us4RwdgUgBosXL09-D-TowmgwWlDskvlA/edit?usp=sharing) to `~/Desktop`, ready to generate the job scripts
+
+3. Generate the job scripts with this command
+
+```bash
+./gradlew :spark:run --args="generate-ods-sql -f ~/Desktop/ods.xlsx --output ~/Downloads/SharpETL/spark/src/main/resources/tasks"
+```
+
+4. Logs like the following indicate the job scripts have been generated
+
+```log
+2022/08/02 17:23:51 INFO [ETLLogger] - Write sql file to /Users/izhangzhihao/Downloads/SharpETL/spark/src/main/resources/tasks/ods__t_order.sql
+2022/08/02 17:23:51 INFO [ETLLogger] - Write sql file to /Users/izhangzhihao/Downloads/SharpETL/spark/src/main/resources/tasks/ods__t_user.sql
+2022/08/02 17:23:51 INFO [ETLLogger] - Write sql file to /Users/izhangzhihao/Downloads/SharpETL/spark/src/main/resources/tasks/ods__t_product.sql
+```
+
+5. Prepare the connection settings
+
+```properties
+sales.postgres.url=jdbc:postgresql://localhost:5432/postgres?stringtype=unspecified
+sales.postgres.user=postgres
+sales.postgres.password=postgres
+sales.postgres.driver=org.postgresql.Driver
+sales.postgres.fetchsize=10
+```
+
+6. Start the job with this command
+
+```bash
+./gradlew :spark:run --args="batch-job --names=ods__t_order --period=1440 --default-start-time='2022-04-08 00:00:00' --once"
+```
+
+Result:
+
+```log
+Total jobs: 1, success: 1, failed: 0, skipped: 0
+Details:
+job name: ods__t_order SUCCESS x 1
+```
+
+### Run the ods-to-dwd job
+
+1. Download the prepared [excel](https://docs.google.com/spreadsheets/d/1CetkqBsXj_E8oZBsws9iGdaJB1QJUajnwqH4FoKhXKA/edit?usp=sharing) to your desktop
+
+2. Generate the job scripts with this command
+
+```bash
+./gradlew :spark:run --args="generate-dwd-sql -f ~/Desktop/dwd.xlsx --output ~/Downloads/SharpETL/spark/src/main/resources/tasks"
+```
+
+3. Logs like the following indicate the job scripts have been generated
+
+```log
+2022/08/03 09:02:26 INFO [ETLLogger] - Write sql file to /Users/izhangzhihao/Downloads/SharpETL/spark/src/main/resources/tasks/t_order_t_fact_order.sql
+2022/08/03 09:02:26 INFO [ETLLogger] - Write sql file to /Users/izhangzhihao/Downloads/SharpETL/spark/src/main/resources/tasks/t_user_t_dim_user.sql
+2022/08/03 09:02:26 INFO [ETLLogger] - Write sql file to /Users/izhangzhihao/Downloads/SharpETL/spark/src/main/resources/tasks/t_product_t_dim_product.sql
+```
+
+4. Start the job with this command
+
+```bash
+./gradlew :spark:run --args="batch-job --names=t_order_t_fact_order --period=1440 --default-start-time='2022-04-08 00:00:00' --once"
+```
+
+Result:
+
+```log
+Total jobs: 1, success: 1, failed: 0, skipped: 0
+Details:
+job name: t_order_t_fact_order SUCCESS x 1
+```
+
+### Run the job from dwd to report
+
+1. Manually create two workflow files, one for each report requirement. The first covers the actual sales of the Huawei mate40-v2. Put it under `~/Downloads/sharp-etl/spark/src/main/resources/tasks/` and name it `order_report_actual_hive.sql`:
+
+```sql
+-- workflow=report__t_fact_order_report_actual
+-- period=1440
+-- loadType=incremental
+-- logDrivenType=timewindow
+
+-- step=1
+-- source=hive
+-- dbName=dwd
+-- tableName=t_fact_order
+-- target=hive
+-- dbName=report
+-- tableName=t_fact_order_report_actual
+-- writeMode=overwrite
+select fact.order_sn as order_sn,
+ dim.product_id as product_id,
+ dim.mid as product_code,
+ dim.name as product_name,
+ dim.version as product_version,
+ dim.status as product_status,
+ fact.price as price,
+ fact.discount as discount,
+ fact.order_status as order_status,
+ fact.order_create_time as order_create_time,
+ fact.order_update_time as order_update_time,
+ fact.actual as actual
+from dwd.t_fact_order fact
+ inner join dim.t_dim_product dim
+ on fact.product_id = dim.product_id
+ and fact.is_latest = '1';
+```
+
+2. The second covers mate40-v2 sales including v1. Put it under `~/Downloads/sharp-etl/spark/src/main/resources/tasks/` and name it `order_report_latest_hive.sql`:
+
+```sql
+-- workflow=report__t_fact_order_report_latest
+-- period=1440
+-- loadType=incremental
+-- logDrivenType=timewindow
+
+-- step=1
+-- source=hive
+-- dbName=dwd
+-- tableName=t_fact_order
+-- target=hive
+-- dbName=report
+-- tableName=t_fact_order_report_latest
+-- writeMode=overwrite
+select fact.order_sn as order_sn,
+ dim2.product_id as product_id,
+ dim2.mid as product_code,
+ dim2.name as product_name,
+ dim2.version as product_version,
+ dim2.status as product_status,
+ fact.price as price,
+ fact.discount as discount,
+ fact.order_status as order_status,
+ fact.order_create_time as order_create_time,
+ fact.order_update_time as order_update_time,
+ fact.actual as actual
+from dwd.t_fact_order fact
+ inner join dim.t_dim_product dim on fact.product_id = dim.product_id and fact.is_latest = '1'
+ inner join (select * from dim.t_dim_product dim_latest where is_latest = '1') dim2
+ on dim.mid = dim2.mid and fact.is_latest = '1';
+```
+
+3. Start the jobs with this command:
+
+```bash
+./gradlew :spark:run --args="batch-job --names=order_report_actual_hive,order_report_latest_hive --period=1440 --default-start-time='2022-04-08 00:00:00' --once"
+```
diff --git a/website/docs/excel-template-dwd.md b/website/docs/excel-template-dwd.md
new file mode 100644
index 0000000..26b707b
--- /dev/null
+++ b/website/docs/excel-template-dwd.md
@@ -0,0 +1,1376 @@
+---
+title: "Excel template for ods to dwd"
+sidebar_position: 4
+toc: true
+last_modified_at: 2021-10-21T10:59:57-04:00
+---
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+## Prerequisites
+
+* all tables (fact & dim) must exist before running ETL jobs (the ETL framework does not create tables)
+* you can download [this Excel file](https://docs.google.com/spreadsheets/d/1Prw1LFfkSkaAuf1K6O0TTI5PPRP7lLtzIR63x9HCSVw/edit#gid=1642393109) to `~/Desktop` to follow this guide.
+
+## Case 1
+
+Case 1 simply copies all data from ods to dwd, with no joins against other tables and no quality checks.
+
+
+
+
+
+| source_db_name | source_table_name | column_name | incremental_type | target_db_name | target_table_name | target_column_name | sort_column | id_column | quality_check_rules | dim_key | dim_sort_column | dim_description | auto_create_dim | auto_create_dim_id | dim_db_name | dim_table_name | dim_column_name | zip_dim_key | partition_column |
+| :------------- | :---------------- | :----------------- | :----------------- | :------------- | :---------------- | :----------------- | :---------- | :-------- | :------------------ | :------ | :-------------- | :-------------- | :-------------- | :----------------- | :---------- | :------------- | :-------------- | :-------------- | :-------------- |
+| usecase_ods | test_cust | id | incremental_append | usecase_dwd | t_fact_test_cust | id | | TRUE | | | | | | | | | | | |
+| usecase_ods | test_cust | code | incremental_append | usecase_dwd | t_fact_test_cust | code | | | | | | | | | | | | | |
+| usecase_ods | test_cust | bz_time | incremental_append | usecase_dwd | t_fact_test_cust | bz_time | TRUE | | | | | | | | | | | | |
+
+
+
+
+:::note
+You need to rename the Case 1 sheet to `Fact` for the command below to execute successfully.
+:::
+
+To generate the dimension-split SQL, run the following command:
+
+```bash
+./gradlew clean :spark:run --args="generate-fact-sql -f ~/Desktop/数据字典-模版.xlsx --output ~/Desktop/"
+```
+
+The SQL file for Case 1, named `dwd_t_fact_test_cust.sql`, will then be generated on your desktop:
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_cust
+-- options
+-- idColumn=id
+-- target=temp
+-- tableName=`1d764347`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `id` as `id`,
+ `code` as `code`,
+ `bz_time` as `bz_time`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `usecase_ods`.`test_cust`;
+
+-- step=3
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=t_fact_test_cust
+-- target=temp
+-- tableName=`0e5a2f6c`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`t_fact_test_cust` `t_fact_test_cust`
+where `dw_insert_date` = (select max(`dw_insert_date`)
+ from `usecase_dwd`.`t_fact_test_cust` `t_fact_test_cust`);
+
+-- step=4
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=`1d764347`
+-- dwViewName=`0e5a2f6c`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=t_fact_test_cust
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+```
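+
+A note on the `` `(dw_insert_date)?+.+` `` select list that appears in the generated steps: assuming Spark's quoted-regex column-name support is enabled (`spark.sql.parser.quotedRegexColumnNames=true`), a backquoted regex selects all columns matching it, here every column except `dw_insert_date`. A minimal sketch:
+
+```sql
+-- assumes quoted-regex column names are enabled
+set spark.sql.parser.quotedRegexColumnNames=true;
+-- selects every column except dw_insert_date
+select `(dw_insert_date)?+.+` from usecase_dwd.t_fact_test_cust;
+```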
+
+
+
+
+## Case 2
+Case 2 is driven by a new Excel sheet: the dim table is loaded first, then the fact table.
+The fact table joins with the dim table, on the assumption that the dim table already exists and is complete.
+
+
+
+
+
+| source_db_name | source_table_name | column_name | incremental_type | target_db_name | target_table_name | target_column_name | sort_column | id_column | quality_check_rules | dim_key | dim_sort_column | dim_description | auto_create_dim | auto_create_dim_id | dim_db_name | dim_table_name | dim_column_name | zip_dim_key | partition_column |
+| :------------- | :---------------- | :----------------- | :----------------- | :------------- | :---------------- | :----------------- | :---------- | :-------- | :------------------ | :------ | :-------------- | :-------------- | :-------------- | :----------------- | :---------- | :------------- | :-------------- | :------------- | :-------------- |
+| usecase_ods | test_split | id | incremental_append | usecase_dwd | test_fact_split | id | | TRUE | | | | | | | | | | |
+| usecase_ods | test_split | user_id | incremental_append | usecase_dwd | test_fact_split | user_id | | | | TRUE | | TRUE | | | usecase_dwd | t_dim_user | id | |
+| usecase_ods | test_split | user_name | incremental_append | usecase_dwd | test_fact_split | user_name | | | | | | TRUE | | | usecase_dwd | t_dim_user | user_name | |
+| usecase_ods | test_split | user_account | incremental_append | usecase_dwd | test_fact_split | user_account | | | | | | | | | usecase_dwd | t_dim_user | user_account | |
+| usecase_ods | test_split | bz_time | incremental_append | usecase_dwd | test_fact_split | bz_time | TRUE | | | | | | | | | | | |
+| usecase_ods | test_user | id | incremental_append | usecase_dwd | t_dim_user | id | | TRUE | | | | | | | | | | |
+| usecase_ods | test_user | user_name | incremental_append | usecase_dwd | t_dim_user | user_name | | | | | | | | | | | | |
+| usecase_ods | test_user | user_account | incremental_append | usecase_dwd | t_dim_user | user_account | | | | | | | | | | | | |
+| usecase_ods | test_user | bz_time | incremental_append | usecase_dwd | t_dim_user | bz_time | TRUE | | | | | | | | | | | |
+
+
+
+
+
+:::note
+You need to rename the Case 2 sheet to `Fact` for the above command to execute successfully.
+Two SQL files will then be generated on your desktop for Case 2; the first is named `dwd_t_dim_user.sql`:
+:::
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_user
+-- options
+-- idColumn=id
+-- target=temp
+-- tableName=`efa9e5ec`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `id` as `id`,
+ `user_name` as `user_name`,
+ `user_account` as `user_account`,
+ `bz_time` as `bz_time`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `usecase_ods`.`test_user`;
+
+-- step=3
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=t_dim_user
+-- target=temp
+-- tableName=`8c325c5e`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`t_dim_user` `t_dim_user`
+where `dw_insert_date` = (select max(`dw_insert_date`)
+ from `usecase_dwd`.`t_dim_user` `t_dim_user`);
+
+-- step=4
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=`efa9e5ec`
+-- dwViewName=`8c325c5e`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=t_dim_user
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+```
+
+The second one is named `dwd_test_fact_split.sql`:
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_split
+-- options
+-- idColumn=id
+-- target=temp
+-- tableName=`9e69b8df`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `test_split`.`id` as `id`,
+ `test_split`.`bz_time` as `bz_time`,
+ ifnull(`t_dim_user`.`id`, '-1') as `user_id`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `usecase_ods`.`test_split` `test_split`
+ left join `usecase_dwd`.`t_dim_user` `t_dim_user`
+ on `test_split`.`user_id` = `t_dim_user`.`id` and `t_dim_user`.is_latest = '1';
+
+-- step=3
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_split
+-- target=temp
+-- tableName=`8efabcf7`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_fact_split` `test_fact_split`
+where `dw_insert_date` = (select max(`dw_insert_date`)
+ from `usecase_dwd`.`test_fact_split` `test_fact_split`);
+
+-- step=4
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=`9e69b8df`
+-- dwViewName=`8efabcf7`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_split
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+```
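+
+To run the two generated workflows, dim first and then fact (a sketch; the workflow names are assumed to match the generated file names):
+
+```bash
+./gradlew :spark:run --args="batch-job --names=dwd_t_dim_user,dwd_test_fact_split --period=1440 --default-start-time='2022-04-08 00:00:00' --once"
+```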
+
+
+
+
+
+## Case 3
+
+Case 3 joins the fact table with a dim table that has a single primary key.
+
+
+
+
+
+| source_db_name | source_table_name | column_name | incremental_type | target_db_name | target_table_name | target_column_name | sort_column | id_column | quality_check_rules | dim_key | dim_sort_column | dim_description | auto_create_dim | auto_create_dim_id | dim_db_name | dim_table_name | dim_column_name | zip_dim_key | partition_column |
+| :------------- | :---------------- | :----------------- | :----------------- | :------------- | :---------------- | :----------------- | :---------- | :-------- | :------------------ | :------ | :-------------- | :-------------- | :-------------- | :----------------- | :---------- | :------------- | :-------------- | :------------- | :-------------- |
+| usecase_ods | test_fact_case_3 | id | incremental_append | usecase_dwd | test_fact_target_case_3 | id | | TRUE | | | | | | | | | | | |
+| usecase_ods | test_fact_case_3 | real_cust_id | incremental_append | usecase_dwd | test_fact_target_case_3 | real_cust_id | | | null check, power null check | TRUE | | | TRUE | | usecase_dwd | test_cust_case_3 | id | | |
+| usecase_ods | test_fact_case_3 | real_cust_code | incremental_append | usecase_dwd | test_fact_target_case_3 | real_cust_code | | | | | | TRUE | TRUE | | usecase_dwd | test_cust_case_3 | code | | |
+| usecase_ods | test_fact_case_3 | real_cust_bz_time | incremental_append | usecase_dwd | test_fact_target_case_3 | real_cust_bz_time | | | | | TRUE | TRUE | TRUE | | usecase_dwd | test_cust_case_3 | bz_time | | |
+| usecase_ods | test_fact_case_3 | bz_time | incremental_append | usecase_dwd | test_fact_target_case_3 | bz_time | TRUE | | | | | | | | | | | | |
+
+
+
+
+After running the same command, the SQL file for Case 3, named `default_test_fact_target.sql`, will be generated:
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_fact_case_3
+-- options
+-- idColumn=id
+-- column.real_cust_id.qualityCheckRules=null check, power null check
+-- target=temp
+-- tableName=`a8cc8c22`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select *
+from `usecase_ods`.`test_fact_case_3`;
+
+-- step=3
+-- source=temp
+-- target=temp
+-- tableName=`ce47db66`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+with join_fact_temp as (select nullif(`test_fact_case_3`.`real_cust_id`, `test_cust_case_3`.`id`) as `real_cust_id`,
+ nullif(`test_fact_case_3`.`real_cust_code`, `test_cust_case_3`.`code`) as `real_cust_code`,
+ nullif(`test_fact_case_3`.`real_cust_bz_time`, `test_cust_case_3`.`bz_time`) as `real_cust_bz_time`
+ from `a8cc8c22` `test_fact_case_3`
+ left join `usecase_dwd`.`test_cust_case_3` `test_cust_case_3`
+ on `test_fact_case_3`.`real_cust_id` = `test_cust_case_3`.`id`
+ and `test_cust_case_3`.`is_latest` = '1'),
+ distinct_dim_temp as (select `real_cust_id`,
+ `real_cust_code`,
+ `real_cust_bz_time`
+ from join_fact_temp
+ where 1 = 1
+ and (`real_cust_id` is not null)
+ group by `real_cust_id`, `real_cust_code`, `real_cust_bz_time`
+ grouping sets (
+ ( `real_cust_id`, `real_cust_code`, `real_cust_bz_time`)
+ ))
+select `real_cust_id` as `real_cust_id`,
+ first_value(`real_cust_code`) as `real_cust_code`,
+ first_value(`real_cust_bz_time`) as `real_cust_bz_time`,
+ count(1) as `distinct_count_num`
+from distinct_dim_temp
+group by `real_cust_id`
+ grouping sets (
+ ( `real_cust_id`)
+ )
+having 1 = 1
+ and (`real_cust_id` is not null);
+
+-- step=4
+-- source=temp
+-- target=temp
+-- tableName=test_cust_case_3__f1e256b9
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `real_cust_id` as `id`,
+ `real_cust_code` as `code`,
+ `real_cust_bz_time` as `bz_time`,
+ '1' as `is_auto_create`,
+ '${EFFECTIVE_START_TIME}' as `effective_start_time`,
+ '9999-01-01 00:00:00' as `effective_end_time`,
+ '1' as `is_active`,
+ '1' as `is_latest`,
+ '${DATA_RANGE_START}' as `idempotent_key`,
+ '${DATE_END}' as `dw_insert_date`
+from `ce47db66` `ce47db66`
+where 1 = 1
+ and (`real_cust_id` is not null)
+ and `distinct_count_num` = 1;
+
+-- step=5
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_cust_case_3
+-- target=temp
+-- tableName=`8b936862`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_cust_case_3`;
+
+-- step=6
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=test_cust_case_3__f1e256b9
+-- dwViewName=`8b936862`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_cust_case_3
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+
+-- step=7
+-- source=temp
+-- target=temp
+-- tableName=test_fact_target_case_3__6637c70e
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+with duplicate_dimension_temp as (
+ select *
+ from `ce47db66` `ce47db66`
+ where 1 = 1
+ and `distinct_count_num` > 1
+)
+select `test_fact_case_3`.`id` as `id`,
+ `test_fact_case_3`.`bz_time` as `bz_time`,
+ (case
+ when `duplicate_dimension_temp_0`.`real_cust_id` is not null
+ then '-99'
+ else IFNULL(`test_cust_case_3`.`id`, '-1')
+ end) as `real_cust_id`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `a8cc8c22` `test_fact_case_3`
+ left join `usecase_dwd`.`test_cust_case_3` `test_cust_case_3`
+ on `test_fact_case_3`.`real_cust_id` = `test_cust_case_3`.`id` and `test_cust_case_3`.is_latest = '1'
+ left join `duplicate_dimension_temp` `duplicate_dimension_temp_0`
+ on `test_fact_case_3`.`real_cust_id` = `duplicate_dimension_temp_0`.`real_cust_id`;
+
+-- step=8
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select '' as `DW_PARTITION_CLAUSE`;
+
+-- step=9
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_target_case_3
+-- target=temp
+-- tableName=`1a280f2f`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_fact_target_case_3` `test_fact_target_case_3`
+ ${DW_PARTITION_CLAUSE};
+
+-- step=10
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=test_fact_target_case_3__6637c70e
+-- dwViewName=`1a280f2f`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_target_case_3
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+```
+
+
+
+
+## Case 4
+In Case 4, the two columns `area_code` and `area_name` form a composite key, which is used to generate a new primary key (`area_id`) for the dim table.
+
+
+
+
+
+| source_db_name | source_table_name | column_name | incremental_type | target_db_name | target_table_name | target_column_name | sort_column | id_column | quality_check_rules | dim_key | dim_sort_column | dim_description | auto_create_dim | auto_create_dim_id | dim_db_name | dim_table_name | dim_column_name | zip_dim_key | partition_column |
+| :------------- | :---------------- | :----------------- | :----------------- | :------------- | :---------------- | :----------------- | :---------- | :-------- | :------------------ | :------ | :-------------- | :-------------- | :-------------- | :----------------- | :---------- | :------------- | :-------------- | :------------- | :-------------- |
+| usecase_ods | test_fact_auto_dim | id | incremental_append | usecase_dwd | test_fact_target_auto_dim | id | | TRUE | | | | | | | | | | |
+| usecase_ods | test_fact_auto_dim | | incremental_append | usecase_dwd | test_fact_target_auto_dim | area_id | | | | | | | TRUE | TRUE | usecase_dwd | test_area | id | |
+| usecase_ods | test_fact_auto_dim | area_code | incremental_append | usecase_dwd | test_fact_target_auto_dim | area_code | | | | TRUE | | | TRUE | | usecase_dwd | test_area | area_cd | |
+| usecase_ods | test_fact_auto_dim | area_name | incremental_append | usecase_dwd | test_fact_target_auto_dim | area_name | | | | TRUE | | | TRUE | | usecase_dwd | test_area | area_nm | |
+| usecase_ods | test_fact_auto_dim | area_bz_time | incremental_append | usecase_dwd | test_fact_target_auto_dim | area_bz_time | | | | | TRUE | TRUE | TRUE | | usecase_dwd | test_area | bz_time | |
+| usecase_ods | test_fact_auto_dim | bz_time | incremental_append | usecase_dwd | test_fact_target_auto_dim | bz_time | TRUE | | | | | | | | | | | |
+
+
+
+
+
+After running the same command, the SQL file for Case 4, named `default_test_fact_target.sql`, will be generated on your desktop:
+
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_fact_auto_dim
+-- options
+-- idColumn=id
+-- target=temp
+-- tableName=`634737e2`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select *
+from `usecase_ods`.`test_fact_auto_dim`;
+
+-- step=3
+-- source=temp
+-- target=temp
+-- tableName=`cb939fe5`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+with join_fact_temp as (select nullif(`test_fact_auto_dim`.`area_code`, `test_area`.`area_cd`) as `area_code`,
+ nullif(`test_fact_auto_dim`.`area_name`, `test_area`.`area_nm`) as `area_name`,
+ nullif(`test_fact_auto_dim`.`area_bz_time`, `test_area`.`bz_time`) as `area_bz_time`
+ from `634737e2` `test_fact_auto_dim`
+ left join `usecase_dwd`.`test_area` `test_area`
+ on `test_fact_auto_dim`.`area_code` = `test_area`.`area_cd`
+ and `test_fact_auto_dim`.`area_name` = `test_area`.`area_nm`
+ and `test_area`.`is_latest` = '1'),
+ distinct_dim_temp as (select `area_code`,
+ `area_name`,
+ `area_bz_time`
+ from join_fact_temp
+ where 1 = 1
+ and (`area_code` is not null and `area_name` is not null)
+ group by `area_code`, `area_name`, `area_bz_time`
+ grouping sets (
+ ( `area_code`, `area_name`, `area_bz_time`)
+ ))
+select `area_code` as `area_code`,
+ `area_name` as `area_name`,
+ first_value(`area_bz_time`) as `area_bz_time`,
+ count(1) as `distinct_count_num`
+from distinct_dim_temp
+group by `area_code`, `area_name`
+ grouping sets (
+ ( `area_code`, `area_name`)
+ )
+having 1 = 1
+ and (`area_code` is not null and `area_name` is not null);
+
+-- step=4
+-- source=temp
+-- target=temp
+-- tableName=test_area__f5045d67
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select uuid() as `id`,
+ `area_code` as `area_cd`,
+ `area_name` as `area_nm`,
+ `area_bz_time` as `bz_time`,
+ '1' as `is_auto_create`,
+ '${EFFECTIVE_START_TIME}' as `effective_start_time`,
+ '9999-01-01 00:00:00' as `effective_end_time`,
+ '1' as `is_active`,
+ '1' as `is_latest`,
+ '${DATA_RANGE_START}' as `idempotent_key`,
+ '${DATE_END}' as `dw_insert_date`
+from `cb939fe5` `cb939fe5`
+where 1 = 1
+ and (`area_code` is not null and
+ `area_name` is not null)
+ and `distinct_count_num` = 1;
+
+-- step=5
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_area
+-- target=temp
+-- tableName=`8878baea`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_area`
+where `dw_insert_date` = (select max(`dw_insert_date`)
+ from `usecase_dwd`.`test_area`);
+
+-- step=6
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=test_area__f5045d67
+-- dwViewName=`8878baea`
+-- primaryFields=area_cd,area_nm
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_area
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+
+-- step=7
+-- source=temp
+-- target=temp
+-- tableName=test_fact_target_auto_dim__d3531bfc
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+with duplicate_dimension_temp as (
+ select *
+ from `cb939fe5` `cb939fe5`
+ where 1 = 1
+ and `distinct_count_num` > 1
+)
+select `test_fact_auto_dim`.`id` as `id`,
+ `test_fact_auto_dim`.`bz_time` as `bz_time`,
+ (case
+ when `duplicate_dimension_temp_0`.`area_code` is not null and `duplicate_dimension_temp_0`.`area_name` is not null
+ then '-99'
+ else IFNULL(`test_area`.`id`, '-1')
+        end) as `area_id`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `634737e2` `test_fact_auto_dim`
+ left join `usecase_dwd`.`test_area` `test_area`
+ on `test_fact_auto_dim`.`area_code` = `test_area`.`area_cd`
+ and `test_fact_auto_dim`.`area_name` = `test_area`.`area_nm` and `test_area`.is_latest = '1'
+ left join `duplicate_dimension_temp` `duplicate_dimension_temp_0`
+ on `test_fact_auto_dim`.`area_code` = `duplicate_dimension_temp_0`.`area_code`
+ and `test_fact_auto_dim`.`area_name` = `duplicate_dimension_temp_0`.`area_name`;
+
+-- step=8
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_target_auto_dim
+-- target=temp
+-- tableName=`a3652e87`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_fact_target_auto_dim` `test_fact_target_auto_dim`
+where `dw_insert_date` = (select max(`dw_insert_date`)
+ from `usecase_dwd`.`test_fact_target_auto_dim` `test_fact_target_auto_dim`);
+
+-- step=9
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=`634737e2`
+-- dwViewName=`a3652e87`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_target_auto_dim
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+
+```
+
+
+
+## Case 5
+
+Case 5 covers origin data that has no id column.
+
+This is not supported for now.
+
+
+## Case 6
+
+In Case 6, the origin data has no business time column.
+
+
+
+
+
+| source_db_name | source_table_name | column_name | incremental_type | target_db_name | target_table_name | target_column_name | expression | sort_column | id_column | quality_check_rules | dim_key | dim_sort_column | dim_description | auto_create_dim | auto_create_dim_id | dim_db_name | dim_table_name | dim_column_name | zip_dim_key | partition_column|
+| :------------- | :---------------- | :---------- | :----------------- | :------------- | :---------------- | :----------------- | :--------- | :---------- | :-------- | :------------------ | :------ | :-------------- | :-------------- | :-------------- | :----------------- | :---------- | :------------- | :-------------- | :---------- | :--------------|
+| usecase_ods | test_fact_case_6 | id | incremental_append | usecase_dwd | test_fact_target_case_6 | id | TRUE | TRUE | | | | | | | | | | | |
+| usecase_ods | test_fact_case_6 | | incremental_append | usecase_dwd | test_fact_target_case_6 | area_id | | | | | TRUE | TRUE | | TRUE | | usecase_dwd | test_area_case_6 | id | |
+| usecase_ods | test_fact_case_6 | area_code | incremental_append | usecase_dwd | test_fact_target_case_6 | area_code | | | | | | | TRUE | TRUE | | usecase_dwd | test_area_case_6 | area_cd | |
+| usecase_ods | test_fact_case_6 | area_name | incremental_append | usecase_dwd | test_fact_target_case_6 | area_name | | | | | | | TRUE | TRUE | | usecase_dwd | test_area_case_6 | area_nm | |
+
+
+
+
+
+After running the same command, the SQL file `default_test_fact_target.sql` will be generated:
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_fact_case_6
+-- options
+-- idColumn=id
+-- target=temp
+-- tableName=`b53c9449`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select *
+from `usecase_ods`.`test_fact_case_6`;
+
+-- step=3
+-- source=temp
+-- target=temp
+-- tableName=`6954aac5`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+with join_fact_temp as (select nullif(`test_fact_case_6`.`area_id`, `test_area_case_6`.`id`) as `area_id`,
+ nullif(`test_fact_case_6`.`area_code`, `test_area_case_6`.`area_cd`) as `area_code`,
+ nullif(`test_fact_case_6`.`area_name`, `test_area_case_6`.`area_nm`) as `area_name`
+ from `b53c9449` `test_fact_case_6`
+ left join `usecase_dwd`.`test_area_case_6` `test_area_case_6`
+ on `test_fact_case_6`.`area_id` = `test_area_case_6`.`id`
+ and `test_area_case_6`.`is_latest` = '1'),
+ distinct_dim_temp as (select `area_id`,
+ `area_code`,
+ `area_name`
+ from join_fact_temp
+ where 1 = 1
+ and (`area_id` is not null)
+ group by `area_id`, `area_code`, `area_name`
+ grouping sets (
+ ( `area_id`, `area_code`, `area_name`)
+ ))
+select `area_id` as `area_id`,
+ first_value(`area_code`) as `area_code`,
+ first_value(`area_name`) as `area_name`,
+ count(1) as `distinct_count_num`
+from distinct_dim_temp
+group by `area_id`
+ grouping sets (
+ ( `area_id`)
+ )
+having 1 = 1
+ and (`area_id` is not null);
+
+-- step=4
+-- source=temp
+-- target=temp
+-- tableName=test_area_case_6__ebc46690
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `area_id` as `id`,
+ `area_code` as `area_cd`,
+ `area_name` as `area_nm`,
+ '1' as `is_auto_create`,
+ '${EFFECTIVE_START_TIME}' as `effective_start_time`,
+ '9999-01-01 00:00:00' as `effective_end_time`,
+ '1' as `is_active`,
+ '1' as `is_latest`,
+ '${DATA_RANGE_START}' as `idempotent_key`,
+ '${DATE_END}' as `dw_insert_date`
+from `6954aac5` `6954aac5`
+where 1 = 1
+ and (`area_id` is not null)
+ and `distinct_count_num` = 1;
+
+-- step=5
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_area_case_6
+-- target=temp
+-- tableName=`1c7bcb60`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_area_case_6`;
+
+-- step=6
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=id
+-- odsViewName=test_area_case_6__ebc46690
+-- dwViewName=`1c7bcb60`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_area_case_6
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+
+-- step=7
+-- source=temp
+-- target=temp
+-- tableName=test_fact_target_case_6__e9ae24c3
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+with duplicate_dimension_temp as (
+ select *
+ from `6954aac5` `6954aac5`
+ where 1 = 1
+ and `distinct_count_num` > 1
+)
+select `test_fact_case_6`.`id` as `id`,
+ (case
+ when `duplicate_dimension_temp_0`.`area_id` is not null
+ then '-99'
+ else IFNULL(`test_area_case_6`.`id`, '-1')
+ end) as `area_id`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `b53c9449` `test_fact_case_6`
+ left join `usecase_dwd`.`test_area_case_6` `test_area_case_6`
+ on `test_fact_case_6`.`area_id` = `test_area_case_6`.`id` and `test_area_case_6`.is_latest = '1'
+ left join `duplicate_dimension_temp` `duplicate_dimension_temp_0`
+ on `test_fact_case_6`.`area_id` = `duplicate_dimension_temp_0`.`area_id`;
+
+-- step=8
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select '' as `DW_PARTITION_CLAUSE`;
+
+-- step=9
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_target_case_6
+-- target=temp
+-- tableName=`775d93f3`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_fact_target_case_6` `test_fact_target_case_6`
+ ${DW_PARTITION_CLAUSE};
+
+-- step=10
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=id
+-- odsViewName=test_fact_target_case_6__e9ae24c3
+-- dwViewName=`775d93f3`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_target_case_6
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+```
+
+
+
+
+Script for creating ODS tables:
+
+
+```sql
+create table usecase_ods.test_fact
+(
+ id varchar(255),
+ area_code varchar(255),
+ area_name varchar(255),
+ job_time timestamp
+)
+```
+
+Insert data to ODS:
+
+```sql
+insert into usecase_ods.test_fact values(1, '123', 'area-123', '2020-01-01 15:05:05');
+insert into usecase_ods.test_fact values(2, '456', 'area-456', '2020-11-01 15:05:05');
+```
+
+
+
+
+Script for creating DWD tables:
+
+```sql
+create table usecase_dwd.test_fact_target
+(
+ id varchar(255),
+ area_id varchar(255),
+ job_time timestamp,
+ is_auto_create varchar(255),
+ effective_start_time timestamp,
+ effective_end_time timestamp,
+ is_active varchar(255),
+ is_latest varchar(255),
+ idempotent_key varchar(255),
+ dw_insert_date varchar(255)
+);
+
+create table usecase_dwd.test_area
+(
+ id varchar(255),
+ area_cd varchar(255),
+ area_nm varchar(255),
+ bz_time timestamp,
+ is_auto_create varchar(255),
+ effective_start_time timestamp,
+ effective_end_time timestamp,
+ is_active varchar(255),
+ is_latest varchar(255),
+ idempotent_key varchar(255),
+ dw_insert_date varchar(255)
+);
+
+```
+
+
+
+
+## Case 7
+
+Case 7 covers origin systems that perform HARD deletes.
+
+For this case, set `incremental_type = incremental_diff`.
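+
+A conceptual sketch of what `incremental_diff` implies (this is not the generated output; the table names reuse earlier examples): the full source snapshot is compared against the current dwd rows, so rows hard-deleted upstream can be detected and closed out.
+
+```sql
+-- rows that exist in dwd but have disappeared from the source, i.e. hard-deleted upstream
+select dwd.id
+from usecase_dwd.test_fact_target dwd
+         left join usecase_ods.test_fact src on dwd.id = src.id
+where src.id is null
+  and dwd.is_latest = '1';
+```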
+
+## Case 8
+
+In Case 8, the auto-created dim table is combined from multiple fact tables.
+
+
+
+
+
+| source_db_name | source_table_name | column_name | incremental_type | target_db_name | target_table_name | target_column_name | expression | sort_column | id_column | quality_check_rules | dim_key | dim_sort_column | dim_description | auto_create_dim | auto_create_dim_id | dim_db_name | dim_table_name | dim_column_name | zip_dim_key | partition_column |
+| :------------- | :----------------- | :------------- | :----------------- | :------------- | :--------------------- | :----------------- | :--------- | :---------- | :-------- | :------------------ | :------ | :-------------- | :-------------- | :-------------- | :----------------- | :---------- | :------------- | :-------------- | :---------- | :--------------- |
+| usecase_ods | test_fact_case8 | id | incremental_append | usecase_dwd | test_fact_target_case8 | id | | | TRUE | | | | | | | | | | | |
+| usecase_ods | test_fact_case8 | region_id | incremental_append | usecase_dwd | test_fact_target_case8 | region_id | | | | | TRUE | | | TRUE | | usecase_dwd | test_region | id | | |
+| usecase_ods | test_fact_case8 | region_code | incremental_append | usecase_dwd | test_fact_target_case8 | region_code | | | | | | | TRUE | TRUE | | usecase_dwd | test_region | region_cd | | |
+| usecase_ods | test_fact_case8 | region_name | incremental_append | usecase_dwd | test_fact_target_case8 | region_name | | | | | | | TRUE | TRUE | | usecase_dwd | test_region | region_nm | | |
+| usecase_ods | test_fact_case8 | region_bz_time | incremental_append | usecase_dwd | test_fact_target_case8 | region_bz_time | | | | | | TRUE | TRUE | TRUE | | usecase_dwd | test_region | bz_time | | |
+| usecase_ods | test_fact_case8 | bz_time | incremental_append | usecase_dwd | test_fact_target_case8 | bz_time | | TRUE | | | | | | | | | | | | |
+| usecase_ods | test_store_fact | id | incremental_append | usecase_dwd | test_store_fact_target | id | | | TRUE | | | | | | | | | | | |
+| usecase_ods | test_store_fact | region_id | incremental_append | usecase_dwd | test_store_fact_target | region_id | | | | | TRUE | | | TRUE | | usecase_dwd | test_region | id | | |
+| usecase_ods | test_store_fact | region_count | incremental_append | usecase_dwd | test_store_fact_target | region_count | | | | | | | TRUE | TRUE | | usecase_dwd | test_region | region_ct | | |
+| usecase_ods | test_store_fact | region_address | incremental_append | usecase_dwd | test_store_fact_target | region_address | | | | | | | TRUE | TRUE | | usecase_dwd | test_region | region_address | | |
+| usecase_ods | test_store_fact | region_bz_time | incremental_append | usecase_dwd | test_store_fact_target | region_bz_time | | | | | | TRUE | TRUE | TRUE | | usecase_dwd | test_region | bz_time | | |
+| usecase_ods | test_store_fact | bz_time | incremental_append | usecase_dwd | test_store_fact_target | bz_time | | TRUE | | | | | | | | | | | | |
+| usecase_ods | test_region_source | id | incremental_append | usecase_dwd | test_region | id | | | TRUE | | | | | | | | | | | |
+| usecase_ods | test_region_source | region_cd | incremental_append | usecase_dwd | test_region | region_cd | | | | | | | | | | | | | | |
+| usecase_ods | test_region_source | region_nm | incremental_append | usecase_dwd | test_region | region_nm | | | | | | | | | | | | | | |
+| usecase_ods | test_region_source | region_ct | incremental_append | usecase_dwd | test_region | region_ct | | | | | | | | | | | | | | |
+| usecase_ods | test_region_source | region_address | incremental_append | usecase_dwd | test_region | region_address | | | | | | | | | | | | | | |
+| usecase_ods | test_region_source | bz_time | incremental_append | usecase_dwd | test_region | bz_time | | TRUE | | | | | | | | | | | | |
+| usecase_ods | test_region_source | | incremental_append | usecase_dwd | test_region | is_auto_create | '0' | | | | | | | | | | | | | |
+
+
+
+
+
+
+Three SQL files will be generated on your desktop: `usecase_dwd_test_region.sql`, `usecase_dwd_test_fact_target_case8.sql` and `usecase_dwd_test_store_fact_target.sql`.
+
+`usecase_dwd_test_region.sql`:
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_region_source
+-- options
+-- idColumn=id
+-- target=temp
+-- tableName=`b84d02c6`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `id` as `id`,
+ `region_cd` as `region_cd`,
+ `region_nm` as `region_nm`,
+ `region_ct` as `region_ct`,
+ `region_address` as `region_address`,
+ `bz_time` as `bz_time`,
+ '0' as `is_auto_create`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `usecase_ods`.`test_region_source`;
+
+-- step=3
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_region
+-- target=temp
+-- tableName=`eac5375c`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_region` `test_region`
+where `dw_insert_date` = (select max(`dw_insert_date`)
+ from `usecase_dwd`.`test_region` `test_region`);
+
+-- step=4
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=`b84d02c6`
+-- dwViewName=`eac5375c`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_region
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+
+```
+
+`usecase_dwd_test_fact_target_case8.sql`:
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_fact_case8
+-- options
+-- idColumn=id
+-- target=temp
+-- tableName=`c7a033fe`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `test_fact_case8`.`id` as `id`,
+ `test_fact_case8`.`bz_time` as `bz_time`,
+ ifnull(`test_region`.`id`, '-1') as `region_id`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `usecase_ods`.`test_fact_case8` `test_fact_case8`
+ left join `usecase_dwd`.`test_region` `test_region`
+ on `test_fact_case8`.`region_id` = `test_region`.`id` and `test_region`.is_latest = '1';
+
+-- step=3
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_target_case8
+-- target=temp
+-- tableName=`b99ce421`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_fact_target_case8` `test_fact_target_case8`
+where `dw_insert_date` = (select max(`dw_insert_date`)
+ from `usecase_dwd`.`test_fact_target_case8` `test_fact_target_case8`);
+
+-- step=4
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=`c7a033fe`
+-- dwViewName=`b99ce421`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_fact_target_case8
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+
+```
+
+`usecase_dwd_test_store_fact_target.sql`:
+
+```sql
+-- step=1
+-- source=temp
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+select from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'yyyyMMdd') as `DATE_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_END}', 'yyyy-MM-dd HH:mm:ss'), 'HH') as `HOUR_END`,
+ from_unixtime(unix_timestamp('${DATA_RANGE_START}', 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') as `EFFECTIVE_START_TIME`;
+
+-- step=2
+-- source=hive
+-- dbName=usecase_ods
+-- tableName=test_store_fact
+-- options
+-- idColumn=id
+-- target=temp
+-- tableName=`ee9d6f1e`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `test_store_fact`.`id` as `id`,
+ `test_store_fact`.`bz_time` as `bz_time`,
+ ifnull(`test_region`.`id`, '-1') as `region_id`,
+ '${EFFECTIVE_START_TIME}' as effective_start_time,
+ '9999-01-01 00:00:00' as effective_end_time,
+ '1' as is_active,
+ '1' as is_latest,
+ '${DATA_RANGE_START}' as idempotent_key,
+ '${DATE_END}' as dw_insert_date
+from `usecase_ods`.`test_store_fact` `test_store_fact`
+ left join `usecase_dwd`.`test_region` `test_region`
+ on `test_store_fact`.`region_id` = `test_region`.`id` and `test_region`.is_latest = '1';
+
+-- step=3
+-- source=hive
+-- dbName=usecase_dwd
+-- tableName=test_store_fact_target
+-- target=temp
+-- tableName=`ff1acfcc`
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+select `(dw_insert_date)?+.+`,
+ '${DATE_END}' as `dw_insert_date`
+from `usecase_dwd`.`test_store_fact_target` `test_store_fact_target`
+where `dw_insert_date` = (select max(`dw_insert_date`)
+ from `usecase_dwd`.`test_store_fact_target` `test_store_fact_target`);
+
+-- step=4
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.ZipTableTransformer
+-- methodName=transform
+-- dwDataLoadType=incremental
+-- sortFields=bz_time
+-- odsViewName=`ee9d6f1e`
+-- dwViewName=`ff1acfcc`
+-- primaryFields=id
+-- transformerType=object
+-- target=hive
+-- dbName=usecase_dwd
+-- tableName=test_store_fact_target
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=overwrite
+
+
+```
+
+
+
+
+Script for creating ODS tables:
+
+
+```sql
+create table usecase_ods.test_fact_case8
+(
+ id varchar(255),
+ region_id varchar(255),
+ region_code varchar(255),
+ region_name varchar(255),
+ region_bz_time timestamp,
+ bz_time timestamp,
+ job_id varchar(255),
+ job_time timestamp
+);
+
+create table usecase_ods.test_store_fact
+(
+ id varchar(255),
+ region_id varchar(255),
+ region_count varchar(255),
+ region_address varchar(255),
+ region_bz_time timestamp,
+ bz_time timestamp,
+ job_id varchar(255),
+ job_time timestamp
+);
+
+create table usecase_ods.test_region_source
+(
+ id varchar(255),
+ region_cd varchar(255),
+ region_nm varchar(255),
+ region_ct int,
+ region_address varchar(255),
+ bz_time timestamp,
+ job_id varchar(255),
+ job_time timestamp
+);
+```
+
+Insert data to ODS:
+
+```sql
+insert into usecase_ods.test_fact_case8 values(1, '1', '123', 'region-123', '2020-01-01 15:05:05', '2020-01-01 15:05:05', '1', '2020-01-01 15:05:05');
+insert into usecase_ods.test_fact_case8 values(2, '2', '456', 'region-456', '2020-11-01 15:05:05', '2020-11-01 15:05:05', '2', '2020-11-01 15:05:05');
+
+insert into usecase_ods.test_store_fact values(1, '1', 123, 'address-123', '2020-01-01 15:05:05', '2020-01-01 15:05:05', '1', '2020-01-01 15:05:05');
+insert into usecase_ods.test_store_fact values(2, '2', 456, 'address-456', '2020-11-01 15:05:05', '2020-11-01 15:05:05', '2', '2020-11-01 15:05:05');
+
+insert into usecase_ods.test_region_source values(1, '123', 'region-123', 123, 'address-123', '2020-01-01 15:05:05', '1', '2020-01-01 15:05:05');
+insert into usecase_ods.test_region_source values(2, '456', 'region-456', 456, 'address-456', '2020-11-01 15:05:05', '2', '2020-11-01 15:05:05');
+```
+
+
+
+
+Script for creating DWD tables:
+
+```sql
+create table usecase_dwd.test_fact_target_case8
+(
+ id varchar(255),
+ region_id varchar(255),
+ bz_time timestamp,
+ job_time timestamp,
+ effective_start_time timestamp,
+ effective_end_time timestamp,
+ is_active varchar(255),
+ is_latest varchar(255),
+ idempotent_key varchar(255),
+ dw_insert_date varchar(255)
+);
+
+create table usecase_dwd.test_store_fact_target
+(
+ id varchar(255),
+ region_id varchar(255),
+ bz_time timestamp,
+ job_time timestamp,
+ effective_start_time timestamp,
+ effective_end_time timestamp,
+ is_active varchar(255),
+ is_latest varchar(255),
+ idempotent_key varchar(255),
+ dw_insert_date varchar(255)
+);
+
+create table usecase_dwd.test_region
+(
+ id varchar(255),
+ region_cd varchar(255),
+ region_nm varchar(255),
+ region_ct int,
+ region_address varchar(255),
+ bz_time timestamp,
+ job_time timestamp,
+ is_auto_create varchar(255),
+ effective_start_time timestamp,
+ effective_end_time timestamp,
+ is_active varchar(255),
+ is_latest varchar(255),
+ idempotent_key varchar(255),
+ dw_insert_date varchar(255)
+);
+```
+
+> Please note: the field `is_auto_create` is required for **dim** tables. It marks whether a row comes from the source data or was generated by the ETL framework.
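+
+For example, to inspect which dim rows were auto-created by the framework rather than loaded from source data (a sketch against the `test_region` table above):
+
+```sql
+-- '1' = generated by the ETL framework, '0' = loaded from source data
+select id, region_cd, region_nm, is_auto_create
+from usecase_dwd.test_region
+where is_auto_create = '1';
+```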
+
+
+
+
+
diff --git a/website/docs/excel-template-ods.md b/website/docs/excel-template-ods.md
new file mode 100644
index 0000000..7b0a996
--- /dev/null
+++ b/website/docs/excel-template-ods.md
@@ -0,0 +1,86 @@
+---
+title: "Excel template for source to ods"
+sidebar_position: 3
+toc: true
+last_modified_at: 2021-10-21T10:59:57-04:00
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+## Prerequisites
+
+* all tables must exist before running ETL jobs (the ETL framework does not create tables)
+
+* you can download [this Excel file](https://docs.google.com/spreadsheets/d/1Prw1LFfkSkaAuf1K6O0TTI5PPRP7lLtzIR63x9HCSVw/edit#gid=1642393109) to `~/Desktop` to follow this guide.
+
+## Config sample
+
+
+
+
+
+| source_db_name | source_table_name | source_column_name | is_PK | incremental_column | additional_filter | target_db_name | target_table_name | target_column_name | expression | incremental_type | partition column | update_frequency |
+| :------------- | :---------------- | :----------------- | :---- | :----------------- | :---------------- | :------------- | :---------------- | :----------------- | :--------- | :----------------- | :--------------- | :--------------- |
+| db_name | table_name | xx1 | 1 | | | db_name | ods_table_name | xx1 | | incremental_append | | 1440 |
+| db_name | table_name | xx2 | | | | db_name | ods_table_name | xx2 | | incremental_append | | 1440 |
+| db_name | table_name | xx3 | | | | db_name | ods_table_name | xx3 | | incremental_append | | 1440 |
+| db_name | table_name | xx4 | | | | db_name | ods_table_name | xx4 | | incremental_append | | 1440 |
+| db_name | table_name | xx5 | | | | db_name | ods_table_name | xx5 | | incremental_append | | 1440 |
+| db_name | table_name | xx6 | | | | db_name | ods_table_name | xx6 | | incremental_append | | 1440 |
+| db_name | table_name | xx7 | | | | db_name | ods_table_name | xx7 | | incremental_append | | 1440 |
+| db_name | table_name | xx8 | | | | db_name | ods_table_name | xx8 | | incremental_append | | 1440 |
+| db_name | table_name | xx9 | | | | db_name | ods_table_name | xx9 | | incremental_append | | 1440 |
+| db_name | table_name | xx10 | | TRUE | | db_name | ods_table_name | xx10 | | incremental_append | | 1440 |
+| db_name | table_name | | | | | db_name | ods_table_name | job_id | ${JOB_ID} | incremental_append | | 1440 |
+| db_name | table_name | | | | | db_name | ods_table_name | job_time | now() | incremental_append | | 1440 |
+| db_name | table_name | | | | | db_name | ods_table_name | load_dt | now() | incremental_append | | 1440 |
+
+
+
+
+
+Running the following command generates a SQL file on your desktop:
+
+```bash
+./gradlew :spark:run --args="generate-ods-sql -f ~/Desktop/数据字典-模版.xlsx --output ~/Desktop/"
+```
+
+The generated .sql file illustrates, step by step, how the data described in the Excel config is handled:
+
+```sql
+-- step=1
+-- source=hive
+-- dbName=db_name
+-- tableName=table_name
+-- target=hive
+-- dbName=db_name
+-- tableName=ods_table_name
+-- checkPoint=false
+-- dateRangeInterval=0
+-- writeMode=append
+-- incrementalType=incremental_append
+SELECT `xx1` AS `xx1`,
+ `xx2` AS `xx2`,
+ `xx3` AS `xx3`,
+ `xx4` AS `xx4`,
+ `xx5` AS `xx5`,
+ `xx6` AS `xx6`,
+ `xx7` AS `xx7`,
+ `xx8` AS `xx8`,
+ `xx9` AS `xx9`,
+ `xx10` AS `xx10`,
+ ${JOB_ID} AS `job_id`,
+ now() AS `job_time`,
+ now() AS `load_dt`
+FROM `db_name`.`table_name`
+WHERE `xx10` >= '${DATA_RANGE_START}' AND `xx10` < '${DATA_RANGE_END}';
+```
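+
+The generated workflow can then be scheduled the same way as in the quick start guide (a sketch; the workflow name is assumed to match the generated file name, `ods_table_name` here):
+
+```bash
+./gradlew :spark:run --args="batch-job --names=ods_table_name --period=1440 --default-start-time='2022-04-08 00:00:00' --once"
+```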
+
+
+
+
diff --git a/website/docs/ods-config-template.md b/website/docs/ods-config-template.md
new file mode 100644
index 0000000..15b21b2
--- /dev/null
+++ b/website/docs/ods-config-template.md
@@ -0,0 +1,69 @@
+---
+title: "ODS config template"
+sidebar_position: 3
+toc: true
+last_modified_at: 2022-11-23T17:59:57-04:00
+---
+
+This document describes the parameters of the ODS config template and how to use them.
+
+For an example of the config template, see the quick start [config file](https://docs.google.com/spreadsheets/d/1eRgSHWKDaRufvPJLp9QhcnWiVKzRegQ6PeZocvAgHEo/edit#gid=0).
+
+## Data source config: ods_etl_config
+
+`source_connection`: the source connection, as configured in application.properties
+
+`source_table`: the table to read data from
+
+`source_db`: the database to read data from
+
+`source_type`: the source database type, e.g. mysql
+
+`target_connection`: the target connection, as configured in application.properties, e.g. hive
+
+`target_table`: the ods table name
+
+`target_db`: the ods database name
+
+`target_type`: the target database type, e.g. hive
+
+`row_filter_expression`: nullable: yes. E.g. `location = 'shanghai'` keeps only data from the Shanghai region; the expression is appended as a where clause to the SQL that queries the source table
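+
+A sketch of the resulting extraction query (modeled on the generated ods SQL shown in the Excel template docs; all names here are placeholders):
+
+```sql
+select *
+from source_db.source_table
+where location = 'shanghai'                        -- row_filter_expression
+  and incremental_column >= '${DATA_RANGE_START}'  -- timewindow lower bound
+  and incremental_column < '${DATA_RANGE_END}';    -- timewindow upper bound
+```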
+
+`load_type`: incremental or full load; allowed values: incremental, full
+
+`log_driven_type`: the log-driven type; allowed values: timewindow/upstream/kafka_offset/auto_inc_id/diff
+
+`upstream`: which upstream job this job depends on; usually empty for ods jobs
+
+`depends_on`: which job this job depends on; usually empty for ods jobs
+
+`default_start`: the start time in timewindow mode
+
+`partition_format`: the partition format; allowed values: empty string or year/month/day
+
+`time_format`: the time format; default: YYYY-MM-DD hh:mm:ss
+
+`period`: how often the job runs, in minutes; for daily jobs this should be 1440
+
+
+## Table config: ods_config
+
+`source_table`: the table to read data from
+
+`source_column`: the source column name
+
+`column_type`: the source column type
+
+`column_description`: the source column description
+
+`is_PK`: whether the column is a primary key in the source table
+
+`is_nullable`: whether the column is nullable in the source table
+
+`incremental_column`: the incremental column, usually a business time field
+
+`target_table`: the target table name
+
+`target_column`: the target column name
+
+`extra_column_expression`: an extra column expression that evaluates a SQL expression over one or more source columns, e.g. md5(concat_ws('', user_name, .. , user_address)); the result is inserted into the target column as a new column, and the corresponding source column is left empty.
\ No newline at end of file
diff --git a/website/docs/properties-file-config.md b/website/docs/properties-file-config.md
new file mode 100644
index 0000000..244f612
--- /dev/null
+++ b/website/docs/properties-file-config.md
@@ -0,0 +1,125 @@
+---
+title: "Properties file config"
+sidebar_position: 8
+toc: true
+last_modified_at: 2021-12-23T18:25:57-04:00
+---
+
+## Sharp ETL config
+
+### `etl.workflow.path`
+
+Where to look for workflow files. By default they are loaded from the `tasks` directory on the classpath; an external path can also be specified, currently only HDFS paths are supported.
+
+Classpath example:
+
+```properties
+etl.workflow.path=tasks
+```
+
+HDFS example:
+
+```properties
+etl.workflow.path=hdfs:///etl/workflows
+```
+
+## Spark config
+
+The built-in defaults shipped in the properties file are as follows:
+
+```properties
+spark.default.spark.sql.adaptive.enabled=true
+spark.default.spark.sql.adaptive.logLevel=info
+spark.default.spark.sql.adaptive.advisoryPartitionSizeInBytes=128m
+spark.default.spark.sql.adaptive.coalescePartitions.enabled=true
+spark.default.spark.sql.adaptive.coalescePartitions.minPartitionNum=1
+spark.default.spark.sql.adaptive.fetchShuffleBlocksInBatch=true
+spark.default.spark.sql.adaptive.localShuffleReader.enabled=true
+spark.default.spark.sql.adaptive.skewJoin.enabled=true
+spark.default.spark.sql.adaptive.skewJoin.skewedPartitionFactor=5
+spark.default.spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes=400m
+spark.default.spark.sql.adaptive.nonEmptyPartitionRatioForBroadcastJoin=0.2
+spark.default.spark.sql.autoBroadcastJoinThreshold=-1
+spark.default.spark.sql.adaptive.shuffle.targetPostShuffleInputSize=134217728
+spark.default.hive.exec.dynamic.partition=true
+spark.default.hive.exec.dynamic.partition.mode=nonstrict
+spark.default.spark.sql.sources.partitionOverwriteMode=dynamic
+spark.default.spark.serializer=org.apache.spark.serializer.KryoSerializer
+spark.default.spark.kryoserializer.buffer.max=128m
+spark.default.spark.sql.crossJoin.enabled=true
+spark.default.spark.driver.cores=1
+spark.default.spark.driver.memory=1g
+spark.default.spark.driver.memoryOverhead=1g
+spark.default.spark.driver.maxResultSize=0
+spark.default.spark.executor.cores=2
+spark.default.spark.executor.memory=4g
+spark.default.spark.executor.memoryOverhead=2g
+spark.default.spark.dynamicAllocation.enabled=true
+spark.default.spark.shuffle.service.enabled=true
+spark.default.spark.dynamicAllocation.minExecutors=1
+spark.default.spark.dynamicAllocation.maxExecutors=4
+spark.default.spark.streaming.stopGracefullyOnShutdown=true
+spark.default.spark.streaming.backpressure.enabled=true
+spark.default.spark.streaming.kafka.maxRatePerPartition=100000
+```
+
+:::tip
+The built-in spark config applies to all jobs; you can override the global defaults for a single workflow via `spark.your workflow name.spark.config=xxx`, e.g. `spark.ods__t_fact_online_order.spark.executor.memory=8g` for the quick start workflow.
+:::
+
+:::tip
+Within a workflow you can also configure spark config for a single step:
+
+```sql
+-- step=set config in conf
+-- source=temp
+-- target=temp
+-- tableName=do_nothing_table
+-- conf
+-- spark.sql.shuffle.partitions=1
+SELECT 'result';
+```
+
+Or use spark sql directly:
+
+```sql
+-- step=3
+-- source=temp
+-- target=temp
+-- tableName=do_nothing_table
+SET spark.sql.hive.version=0.12.1;
+```
+
+:::
+
+## Connection
+
+A connection can be configured for each system you use; refer to the configuration examples in the corresponding [Datasource](/docs/datasource) docs.
+The config structure is `connection name.source/target type.config=value`, e.g. `postgres.postgres.url=...` as shown in the quick start guide.
+
+
+## Using external properties file
+
+An external properties file can be used; this is recommended when you need to run jobs in different environments and the properties may contain sensitive information. Add the parameter when launching the job:
+
+```
+--property=hdfs:///etl/conf/prod.properties
+```
+
+
+## Override properties in command-line
+
+For jobs that need debugging or temporary changes, you can override values from the properties file at run time. Add the parameter when launching the job; separate multiple entries with commas:
+
+```
+--override=mysql.password=XXXX,foo=bar
+```
+
+
+## Properties config priority
+
+When properties are configured in multiple places, the priority is (taking spark conf as an example):
+
+
+Workflow SQL `SET` syntax > workflow `conf` > command-line `--override` > external properties file (`--property`) > classpath `application.properties` file
+
diff --git a/website/docs/quick-start-guide.md b/website/docs/quick-start-guide.md
new file mode 100644
index 0000000..48715e7
--- /dev/null
+++ b/website/docs/quick-start-guide.md
@@ -0,0 +1,307 @@
+---
+title: "Quick Start Guide"
+sidebar_position: 2
+toc: true
+last_modified_at: 2021-10-21T10:59:57-04:00
+---
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+This guide provides a quick peek at Sharp ETL's capabilities.
+
+## Setup
+
+Sharp ETL works well with Spark 2.3+ and Spark 3.x. You can follow the instructions [here](https://github.com/SharpData/SharpETL/blob/master/.github/workflows/build.yml#L15-L19) for the supported spark versions.
+
+
+
+
+Build from source for your spark version:
+
+```bash
+# for spark 3.1 with scala 2.12
+./gradlew buildJars -PscalaVersion=2.12 -PsparkVersion=3.1.2 -PscalaCompt=2.12.15
+
+# for spark 2.4 with scala 2.12
+./gradlew buildJars -PscalaVersion=2.12 -PsparkVersion=2.4.8 -PscalaCompt=2.12.15
+
+# for spark 2.4 with scala 2.11
+./gradlew buildJars -PscalaVersion=2.11 -PsparkVersion=2.4.8 -PscalaCompt=2.11.12
+```
+
+
+
+
+
+Start a postgres instance
+
+```bash
+docker run --name postgres -e POSTGRES_PASSWORD=postgres -d -p 5432:5432 postgres:12.0-alpine
+```
+
+Start an ETL db instance
+
+```bash
+docker run --name mysql8 -d -p 3306:3306 -e MYSQL_ROOT_PASSWORD=root -e MYSQL_DATABASE=sharp_etl mysql:8.0
+```
+
+Suppose we have a table named `online_order` in postgres under the `sales` schema:
+
+
+
+
+```sql
+-- This extension provides a function to generate a version 4 UUID; we must enable it first.
+CREATE EXTENSION IF NOT EXISTS "pgcrypto";
+create schema sales;
+create table sales.online_order
+(
+ order_no varchar(64) default gen_random_uuid() not null
+ primary key,
+ user_id varchar(32) not null,
+ user_name varchar(32) not null,
+ order_total_amount numeric,
+ actual_amount numeric,
+ post_amount numeric,
+ order_pay_amount numeric,
+ total_discount numeric,
+ pay_type varchar(32),
+ source_type varchar(32),
+ order_status varchar(32),
+ note varchar(32),
+ confirm_status varchar(32),
+ payment_time timestamp,
+ delivery_time timestamp,
+ receive_time timestamp,
+ comment_time timestamp,
+ delivery_company varchar(32),
+ delivery_code varchar(32),
+ business_date date default CURRENT_DATE,
+ return_flag varchar(32),
+ created_at timestamp default CURRENT_TIMESTAMP,
+ updated_at timestamp default CURRENT_TIMESTAMP,
+ deleted_at timestamp
+);
+```
+
+
+
+
+**Download [this excel](https://docs.google.com/spreadsheets/d/1k4U2QgZyknJLfpJvVxASsiOcX2nIHX0tx_rUKAINLTY/edit#gid=0) to your `~/Desktop` for the quick start guide.**
+
+:::tip
+You can also use the existing [excel template](https://docs.google.com/spreadsheets/d/1eRgSHWKDaRufvPJLp9QhcnWiVKzRegQ6PeZocvAgHEo/edit#gid=0) for your new cases.
+:::
+
+## Generate sql files from excel config
+
+
+
+
+```bash
+./gradlew :spark:run --args="generate-ods-sql -f ~/Desktop/sharp-etl-Quick-Start-Guide.xlsx --output ~/Desktop/"
+```
+
+
+
+
+
+You should now see a new file generated at `~/Desktop/sales.online_order.sql`:
+
+```sql
+-- workflow=ods__t_fact_online_order
+-- period=1440
+-- loadType=incremental
+-- logDrivenType=timewindow
+
+-- step=1
+-- source=postgres
+-- dbName=postgres
+-- tableName=sales.online_order
+-- target=hive
+-- dbName=ods
+-- tableName=t_fact_online_order
+-- writeMode=append
+SELECT "order_no" AS "order_no",
+ "user_id" AS "user_id",
+ "user_name" AS "user_name",
+ "order_total_amount" AS "order_total_amount",
+ "actual_amount" AS "actual_amount",
+ "post_amount" AS "post_amount",
+ "order_pay_amount" AS "order_pay_amount",
+ "total_discount" AS "total_discount",
+ "pay_type" AS "pay_type",
+ "source_type" AS "source_type",
+ "order_status" AS "order_status",
+ "note" AS "note",
+ "confirm_status" AS "confirm_status",
+ "payment_time" AS "payment_time",
+ "delivery_time" AS "delivery_time",
+ "receive_time" AS "receive_time",
+ "comment_time" AS "comment_time",
+ "delivery_company" AS "delivery_company",
+ "delivery_code" AS "delivery_code",
+ "business_date" AS "business_date",
+ "return_flag" AS "return_flag",
+ "created_at" AS "created_at",
+ "updated_at" AS "updated_at",
+ "deleted_at" AS "deleted_at",
+ ${JOB_ID} AS "job_id",
+ to_char("business_date", 'yyyy') as "year",
+ to_char("business_date", 'MM') as "month",
+ to_char("business_date", 'DD') as "day"
+FROM "postgres"."sales"."online_order"
+WHERE "business_date" >= '${DATA_RANGE_START}' AND "business_date" < '${DATA_RANGE_END}';
+```
+
+## Create ODS table
+
+
+
+
+```sql
+create schema ods;
+create table ods.t_fact_online_order
+(
+ order_no varchar(64) not null,
+ user_id varchar(32) not null,
+ user_name varchar(32) not null,
+ order_total_amount numeric,
+ actual_amount numeric,
+ post_amount numeric,
+ order_pay_amount numeric,
+ total_discount numeric,
+ pay_type varchar(32),
+ source_type varchar(32),
+ order_status varchar(32),
+ note varchar(32),
+ confirm_status varchar(32),
+ payment_time timestamp,
+ delivery_time timestamp,
+ receive_time timestamp,
+ comment_time timestamp,
+ delivery_company varchar(32),
+ delivery_code varchar(32),
+ business_date date default CURRENT_DATE,
+ return_flag varchar(32),
+ created_at timestamp default CURRENT_TIMESTAMP,
+ updated_at timestamp default CURRENT_TIMESTAMP,
+ deleted_at timestamp,
+ job_id varchar(16)
+);
+```
+
+
+
+## Insert data
+
+
+
+
+
+```sql
+insert into sales.online_order(order_no, user_id, user_name, order_total_amount, actual_amount, post_amount,
+ order_pay_amount,
+ total_discount, pay_type, source_type, order_status, payment_time, business_date,
+ created_at, updated_at, deleted_at)
+VALUES ('2021093000001', 1, '张三ð', 200.0, 100, 0, 99, 101.0, 'wechat', 'mini-program', 'paid', '2021-09-30 09:00:35',
+ '2021-09-30',
+ '2021-09-30 09:00:00', '2021-09-30 09:00:35', null),
+ ('2021093000002', 2, '李四o(╥﹏╥)o', 399.0, 200, 0, 200, 199.0, 'wechat', 'official-website', 'paid',
+ '2021-09-30 19:00:35',
+ '2021-09-30',
+ '2021-09-30 19:00:00', '2021-09-30 19:00:35', null);
+```
+
+
+
+
+## Before running the job
+
+The db connection information is specified in `application.properties`; make sure your db connections are included in the file. For this quick start, we need to add the following:
+
+```
+postgres.postgres.url=jdbc:postgresql://localhost:5432/postgres?stringtype=unspecified
+postgres.postgres.user=postgres
+postgres.postgres.password=postgres
+postgres.postgres.driver=org.postgresql.Driver
+postgres.postgres.fetchsize=10
+```
+
+
+## Run the job
+
+Then we will run a sample job which reads data from the `sales.online_order` table and writes it into `ods.t_fact_online_order`:
+
+
+
+
+```bash
+# run single job by `spark-submit`
+spark-submit --class com.github.sharpdata.sharpetl.spark.Entrypoint spark/build/libs/spark-1.0.0-SNAPSHOT.jar single-job --name=sales.online_order --period=1440 --default-start-time="2021-09-30 00:00:00" --local --once
+
+# run single job locally
+./gradlew :spark:run --args="single-job --name=sales.online_order --period=1440 --default-start-time='2021-09-30 00:00:00' --local --once"
+```
+:::note
+You need to put the sql file under `spark/src/main/resources/tasks` or into HDFS/DBFS to run `single-job`.
+If you want to configure the sql file folder, set `etl.workflow.path` in `application.properties`.
+:::
+
+
+
+
+## Query job result
+
+
+
+
+```sql
+SELECT * FROM ods.t_fact_online_order;
+```
+
+| order_no | user_id | user_name | order_total_amount | actual_amount | post_amount | order_pay_amount | total_discount | pay_type | source_type | order_status | note | confirm_status | payment_time | delivery_time | receive_time | comment_time | delivery_company | delivery_code | business_date | return_flag | created_at | updated_at | deleted_at | job_id |
+| :------------ | :------ | :----------- | :----------------- | :------------ | :---------- | :--------------- | :------------- | :------- | :--------------- | :----------- | :--- | :------------- | :------------------------- | :------------ | :----------- | :----------- | :--------------- | :------------ | :------------ | :---------- | :------------------------- | :------------------------- | :--------- |:-------|
+| 2021093000002 | 2 | 李四o(╥﹏╥)o | 399 | 200 | 0 | 200 | 199 | wechat | official-website | paid | | | 2021-09-30 19:00:35.000000 | | | | | | 2021-09-30 | | 2021-09-30 19:00:00.000000 | 2021-09-30 19:00:35.000000 | | 2 |
+| 2021093000001 | 1 | 张三ð | 200 | 100 | 0 | 99 | 101 | wechat | mini-program | paid | | | 2021-09-30 09:00:35.000000 | | | | | | 2021-09-30 | | 2021-09-30 09:00:00.000000 | 2021-09-30 09:00:35.000000 | | 2 |
+
+
diff --git a/website/docs/ranger-integrate.md b/website/docs/ranger-integrate.md
new file mode 100644
index 0000000..43c6d30
--- /dev/null
+++ b/website/docs/ranger-integrate.md
@@ -0,0 +1,174 @@
+---
+title: "Ranger Integrate"
+sidebar_position: 7
+toc: true
+last_modified_at: 2021-12-23T18:25:57-04:00
+---
+
+This guide provides a quick setup for integrating with Apache Ranger.
+
+## Build Ranger Spark plugin from source
+
+For Spark 2.4, Ranger 2.1 and Scala 2.11:
+
+```bash
+git clone https://github.com/izhangzhihao/spark-security.git
+cd spark-security
+mvn clean package -Dmaven.javadoc.skip=true -DskipTests -pl :submarine-spark-security -Pspark-2.4 -Pranger-2.1
+```
+
+You can find the jar file at `spark-security/target/submarine-spark-security-0.7.0-SNAPSHOT.jar`
+
+## Setup testing env
+
+### Settings for Ranger
+
+Create `ranger-spark-security.xml` in `$SPARK_HOME/conf` and add the following configurations
+to point to the right Apache Ranger admin server.
+
+
+```xml
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+  <property>
+    <name>ranger.plugin.spark.policy.rest.url</name>
+    <value>https://ranger.com:port</value>
+  </property>
+  <property>
+    <name>ranger.plugin.spark.policy.rest.ssl.config.file</name>
+    <value>/etc/spark/conf/ranger-spark-policymgr-ssl.xml</value>
+  </property>
+  <property>
+    <name>ranger.plugin.spark.service.name</name>
+    <value>cm_hive</value>
+  </property>
+  <property>
+    <name>ranger.plugin.spark.policy.cache.dir</name>
+    <value>/tmp</value>
+  </property>
+  <property>
+    <name>ranger.plugin.spark.policy.pollIntervalMs</name>
+    <value>5000</value>
+  </property>
+  <property>
+    <name>ranger.plugin.spark.policy.source.impl</name>
+    <value>org.apache.ranger.admin.client.RangerAdminRESTClient</value>
+  </property>
+</configuration>
+```
+
+Create `ranger-spark-audit.xml` in `$SPARK_HOME/conf` and add the following configurations
+to enable/disable auditing.
+
+```xml
+<configuration>
+  <property>
+    <name>xasecure.audit.is.enabled</name>
+    <value>true</value>
+  </property>
+</configuration>
+```
+
+Create `ranger-spark-policymgr-ssl.xml` in `$SPARK_HOME/conf`.
+
+```xml
+<configuration>
+  <property>
+    <name>xasecure.policymgr.clientssl.truststore</name>
+    <value>/home/bigdatauser/cm-auto-global_truststore.jks</value>
+  </property>
+  <property>
+    <name>xasecure.policymgr.clientssl.truststore.credential.file</name>
+    <value>jceks://file/home/bigdatauser/ranger-truststore.jceks</value>
+  </property>
+  <property>
+    <name>xasecure.policymgr.clientssl.keystore</name>
+    <value>/home/bigdatauser/cm-auto-host_keystore.jks</value>
+  </property>
+  <property>
+    <name>xasecure.policymgr.clientssl.keystore.credential.file</name>
+    <value>jceks://file/home/bigdatauser/ranger-keystore.jceks</value>
+  </property>
+  <property>
+    <name>xasecure.policymgr.clientssl.keystore.type</name>
+    <value>jks</value>
+  </property>
+  <property>
+    <name>xasecure.policymgr.clientssl.truststore.type</name>
+    <value>jks</value>
+  </property>
+</configuration>
+```
+
+### Generate `jceks` file
+
+```bash
+java -cp "/opt/cloudera/parcels/CDH/lib/ranger-hive-plugin/install/lib/*" org.apache.ranger.credentialapi.buildks create sslKeyStore -value 'yourpassword' -provider jceks://file/home/bigdatauser/ranger-keystore.jceks
+java -cp "/opt/cloudera/parcels/CDH/lib/ranger-hive-plugin/install/lib/*" org.apache.ranger.credentialapi.buildks create sslTrustStore -value 'yourpassword' -provider jceks://file/home/bigdatauser/ranger-truststore.jceks
+```
+
+### Config Ranger
+
+skipped
+
+### Testing using `spark-shell` or `spark-sql`
+
+```bash
+spark-shell --master yarn --deploy-mode client --conf spark.sql.extensions=org.apache.submarine.spark.security.api.RangerSparkSQLExtension --jars=submarine-spark-security-0.7.0-SNAPSHOT.jar --driver-class-path=slib/*
+```
+
+```bash
+spark-sql --master yarn --deploy-mode client --conf spark.sql.extensions=org.apache.submarine.spark.security.api.RangerSparkSQLExtension --jars=submarine-spark-security-0.7.0-SNAPSHOT.jar --driver-class-path=slib/*
+```
+
+### Testing using `spark-submit`
+
+```bash
+spark-submit --master yarn --deploy-mode client --conf spark.sql.extensions=org.apache.submarine.spark.security.api.RangerSparkSQLExtension --jars=/opt/cloudera/parcels/CDH-7.1.6-1.cdh7.1.6.p0.10506313/jars/hive-common-3.1.3000.7.1.6.0-297.jar,/opt/cloudera/parcels/CDH-7.1.6-1.cdh7.1.6.p0.10506313/jars/hive-metastore-3.1.3000.7.1.6.0-297.jar,submarine-spark-security-0.7.0-SNAPSHOT.jar --driver-class-path=slib/* --class com.github.sharpdata.sharpetl.spark.Entrypoint hdfs:///user/admin/demo_workflow/spark-1.0.0-SNAPSHOT.jar single-job --name=test --period=10 --env=qa --once --skip-running=false --property=hdfs:///user/admin/etl-conf/etl.properties
+```
+
+Successful logs:
+
+```log
+21/12/23 10:10:12 INFO config.RangerConfiguration: addResourceIfReadable(ranger-spark-audit.xml): resource file is file:/etc/spark/conf.cloudera.spark_on_yarn/ranger-spark-audit.xml
+21/12/23 10:10:12 INFO config.RangerConfiguration: addResourceIfReadable(ranger-spark-security.xml): resource file is file:/etc/spark/conf.cloudera.spark_on_yarn/ranger-spark-security.xml
+21/12/23 10:10:12 INFO config.RangerConfiguration: addResourceIfReadable(ranger-spark-policymgr-ssl.xml): resource file is file:/etc/spark/conf.cloudera.spark_on_yarn/ranger-spark-policymgr-ssl.xml
+21/12/23 10:10:12 ERROR config.RangerConfiguration: addResourceIfReadable(ranger-spark-cm_hive-audit.xml): couldn't find resource file location
+21/12/23 10:10:12 ERROR config.RangerConfiguration: addResourceIfReadable(ranger-spark-cm_hive-security.xml): couldn't find resource file location
+21/12/23 10:10:12 ERROR config.RangerConfiguration: addResourceIfReadable(ranger-spark-cm_hive-policymgr-ssl.xml): couldn't find resource file location
+21/12/23 10:10:12 INFO config.RangerPluginConfig: PolicyEngineOptions: { evaluatorType: auto, evaluateDelegateAdminOnly: false, disableContextEnrichers: false, disableCustomConditions: false, disableTagPolicyEvaluation: false, enableTagEnricherWithLocalRefresher: false, disableTrieLookupPrefilter: false, optimizeTrieForRetrieval: false, cacheAuditResult: false }
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AuditProviderFactory: creating..
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AuditProviderFactory: initializing..
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: xasecure.policymgr.clientssl.truststore=/home/bigdatauser/cm-auto-global_truststore.jks
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: ranger.plugin.spark.policy.source.impl=org.apache.ranger.admin.client.RangerAdminRESTClient
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: ranger.plugin.spark.service.name=cm_hive
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: ranger.plugin.spark.policy.cache.dir=/tmp
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: xasecure.policymgr.clientssl.keystore.type=jks
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: ranger.plugin.spark.policy.rest.ssl.config.file=/etc/spark/conf/ranger-spark-policymgr-ssl.xml
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: ranger.plugin.spark.policy.rest.url=https://ranger.com:port/
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: xasecure.policymgr.clientssl.keystore.credential.file=jceks://file/home/bigdatauser/ranger-keystore.jceks
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: xasecure.audit.is.enabled=true
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: ranger.plugin.spark.policy.pollIntervalMs=5000
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: xasecure.policymgr.clientssl.keystore=/home/bigdatauser/cm-auto-host_keystore.jks
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: xasecure.policymgr.clientssl.truststore.type=jks
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: AUDIT PROPERTY: xasecure.policymgr.clientssl.truststore.credential.file=jceks://file/home/bigdatauser/ranger-truststore.jceks
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: No v3 audit configuration found. Trying v2 audit configurations
+21/12/23 10:10:12 INFO provider.AuditProviderFactory: RangerAsyncAuditCleanup: Waiting to audit cleanup start signal
+21/12/23 10:10:12 INFO service.RangerBasePlugin: Created PolicyRefresher Thread(PolicyRefresher(serviceName=cm_hive)-125)
+21/12/23 10:10:13 INFO util.RangerRolesProvider: RangerRolesProvider(serviceName=cm_hive): found updated version. lastKnownRoleVersion=-1; newVersion=9
+21/12/23 10:10:13 INFO util.PolicyRefresher: PolicyRefresher(serviceName=cm_hive): found updated version. lastKnownVersion=-1; newVersion=189
+21/12/23 10:10:13 INFO policyengine.RangerPolicyRepository: This policy engine contains 23 policy evaluators
+21/12/23 10:10:13 INFO conditionevaluator.RangerScriptConditionEvaluator: ScriptEngine for engineName=[JavaScript] is successfully created
+21/12/23 10:10:13 INFO policyengine.RangerPolicyRepository: This policy engine contains 1 policy evaluators
+21/12/23 10:10:13 INFO contextenricher.RangerTagEnricher: Created RangerTagRefresher Thread(RangerTagRefresher(serviceName=cm_hive)-130)
+21/12/23 10:10:14 INFO contextenricher.RangerTagEnricher: There are no tagged resources for service cm_hive
+21/12/23 10:10:14 INFO contextenricher.RangerTagEnricher$RangerTagRefresher: RangerTagRefresher(serviceName=cm_hive).populateTags() - Updated tags-cache to new version of tags, lastKnownVersion=-1; newVersion=1
+21/12/23 10:10:14 INFO security.RangerSparkPlugin$: Policy cache directory successfully set to /tmp
+```
\ No newline at end of file
diff --git a/website/docs/single-job-guide.md b/website/docs/single-job-guide.md
new file mode 100644
index 0000000..4576c8a
--- /dev/null
+++ b/website/docs/single-job-guide.md
@@ -0,0 +1,64 @@
+---
+title: "Single job guide"
+toc: true
+---
+
+This guide provides a quick introduction to the `single-job` command.
+
+## Introduction
+
+Unlike `batch-job`, the `single-job` command runs only a single job each time and is specified as one of the arguments when launching a job. For example, to run a sample job which reads data from a source table and writes it into a target table, the command is as follows:
+
+```bash
+# run single job by `spark-submit`
+spark-submit --class com.github.sharpdata.sharpetl.spark.Entrypoint spark/build/libs/spark-1.0.0-SNAPSHOT.jar single-job --name=source-table --period=1440 --datasource=sales.online_order --default-start-time="2021-09-30 00:00:00" --local --once
+
+# run single job locally
+./gradlew :spark:run --args="single-job --name=source-table --period=1440 --datasource=sales.online_order --default-start-time='2021-09-30 00:00:00' --local --once"
+```
+
+## Parameters
+
+### common command params
+
+1. `--local`
+
+Declare that the job is running in standalone mode. If `--local` is not provided, the job will try running with Hive support enabled.
+
+2. `--release-resource`
+
+Automatically closes the spark session after the job completes.
+
+3. `--skip-running`
+
+After an abrupt crash, use `--skip-running` to mark the last job (stuck in the running state) as failed and start a new one.
+
+4. `--default-start` / `--default-start-time`
+
+Specify the default start time (e.g., 20210101000000) or incremental id of this job. The value only takes effect the first time the job runs; once the job has run successfully, the start is taken from the job log and this argument is ignored.
+
+5. `--once`
+
+Run the job only once (for testing usage).
+
+6. `--env`
+
+Specify the environment in which to run the job: local/test/dev/qa/prod.
+
+7. `--property`
+
+Use a specific properties file, e.g. `--property=hdfs:///user/admin/etl-conf/etl.properties`
+
+8. `--override`
+
+Override config values from the properties file, e.g. `--override=etl.workflow.path=hdfs:///user/hive/sharp-etl,a=b,c=d`
+
+### single-job params
+
+1. `--name`
+
+Specify the name of the job to run (required).
+
+2. `--period`
+
+Specify the period of job execution, in minutes.
+
+3. `-h` / `--help`
+
+Show usage help for the command and its parameters (default: false).
diff --git a/website/docs/sql-syntax.md b/website/docs/sql-syntax.md
new file mode 100644
index 0000000..ebf8a98
--- /dev/null
+++ b/website/docs/sql-syntax.md
@@ -0,0 +1,33 @@
+---
+title: "SQL syntax"
+sidebar_position: 9
+toc: true
+last_modified_at: 2022-06-14T09:25:57+08:00
+---
+
+
+## About
+
+Sharp ETL is not a new sql language; it reuses your existing, familiar SQL (PostgreSQL, MSSQL, or any other sql dialect you want!).
+
+
+## Hello World
+
+Let's start by creating a very simple "hello world" workflow using a Spark temp table:
+
+```sql
+-- workflow=hello_world
+-- loadType=incremental
+-- logDrivenType=timewindow
+
+-- step=print SUCCESS to console
+-- source=temp
+-- target=console
+
+SELECT 'SUCCESS' AS `RESULT`;
+```
+
+## Parameters
+
+Let's look at a slightly more complex workflow spec with parameters.
+
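+For example, a time-window workflow can reference built-in variables such as `${DATA_RANGE_START}`, `${DATA_RANGE_END}` and `${JOB_ID}` (the same variables used by the generated workflows in the quick start guide) directly in its steps. A minimal sketch, with an illustrative workflow name:
+
+```sql
+-- workflow=hello_params
+-- loadType=incremental
+-- logDrivenType=timewindow
+
+-- step=print the current data range to console
+-- source=temp
+-- target=console
+SELECT '${DATA_RANGE_START}' AS `range_start`,
+       '${DATA_RANGE_END}'   AS `range_end`,
+       ${JOB_ID}             AS `job_id`;
+```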
diff --git a/website/docs/transformer-guide.md b/website/docs/transformer-guide.md
new file mode 100644
index 0000000..992256d
--- /dev/null
+++ b/website/docs/transformer-guide.md
@@ -0,0 +1,228 @@
+---
+title: "transformer guide"
+sidebar_position: 10
+toc: true
+last_modified_at: 2021-12-23T18:25:57-04:00
+---
+
+## Defining and using transformers
+
+A `transformer` lets users extend the framework with custom code logic for specific scenarios; it is loaded via reflection, either from a jar or from scala code in a text file. A `transformer` executes a block of code within a job step. Different `transformer`s are distinguished by java class path or by file name, and each has its own custom parameters, e.g.:
+
+```sql
+-- step=1
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.HttpTransformer
+-- methodName=transform
+-- transformerType=object
+-- url=https:xxxx
+-- connectionName=connection_demo
+-- fieldName=centerIds
+-- jsonPath=$.centers[*].id
+-- splitBy=,
+-- target=variables
+-- checkPoint=false
+-- dateRangeInterval=0
+```
+
+Take HttpTransformer as an example: this transformer parses json data fetched from an api and writes it to a table. It also supports dynamic parameters in the url, which can be defined via variables and referenced in the call. In every step that uses a transformer, `source=transformation`, `methodName=transform` and `transformerType=object`, and className is the class name that defines the transformer; the remaining parameters `url`, `connectionName`, `fieldName`, `jsonPath` and `splitBy` are HttpTransformer's custom parameters.
+
+
+
+### Using custom transformers
+
+- `JdbcResultSetTransformer`: mainly used to execute statements with no result set, such as `insert`, `update` and `delete`, against tables in the source database
+
+ ```sql
+ -- step=1
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.JdbcResultSetTransformer
+ -- dbType=yellowbrick
+ -- dbName=bigdata
+ -- methodName=transform
+ -- target=do_nothing
+ -- checkPoint=false
+ -- dateRangeInterval=0
+ delete from demo_table where to_char("HIST_DT", 'yyyyMMdd') = '${TODAY}';
+ ```
+
+  - `source`, `className` and `methodName` are the same as above
+  - `dbType` and `dbName` are used to build the jdbc connection parameters; `dbType` is a custom parameter of the `jdbc transformer`
+  - this step deletes the current day's data from `demo_table`
+
+- `DDLTransformer`: mainly used to create tables in `hive` or `yellowbrick` via DDL statements; the path to the DDL statements must be provided
+
+ ```sql
+ -- step=1
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.DDLTransformer
+ -- methodName=transform
+ -- transformerType=object
+ -- dbName=bigdata
+ -- dbType=yellowbrick
+ -- ddlPath=/demo_ddl
+ -- target=do_nothing
+ ```
+
+  - `dbType` & `dbName`: used to build the jdbc connection; currently tables can only be created via ddl in hive and yb (yellowbrick)
+  - `ddlPath`: the path where the ddl is stored; defaults to `/user/hive/sharp-etl/ddl`
+
+- `JobDependencyCheckTransformer`: mainly used to check at run time whether the upstream jobs a job depends on have finished; takes `dependencies` and `jobName` as input and checks whether any dependency of that jobName has not finished running
+
+ ```sql
+ -- step=1
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.JobDependencyCheckTransformer
+ -- methodName=transform
+ -- transformerType=object
+ -- dependencies=job1,job2,job3
+ -- jobName=test_dependency_demo
+ -- target=do_nothing
+ ```
+
+  - `dependencies`: the names of the upstream jobs; `jobName` is the name of the job to check
+
+- `FileCleanTransformer`: deletes files matching a given pattern under the target path; takes `filePath` and `fileNamePattern` as input, the latter supporting regular expressions
+
+ ```sql
+ -- step=1
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.FileCleanTransformer
+ -- methodName=transform
+ -- transformerType=object
+ -- filePath=test_fileClean
+ -- fileNamePattern=((\w*_test_fileClean.txt))
+ -- target=do_nothing
+ ```
+
+  - `filePath`: the file storage path
+  - `fileNamePattern`: the file name; a regular expression can also be passed
+
+- `DropExternalTableTransformer`: drops, in hive and hdfs, the tables and files of the given `partition` in database `databaseName` whose names start with `tableNamePrefix`; `partition` supports dynamic parameters and can be combined with `variables`
+
+ ```sql
+ -- step=2
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.DropExternalTableTransformer
+ -- methodName=transform
+ -- transformerType=object
+ -- databaseName=testDB
+ -- tableNamePrefix=pre_ods__
+ -- partition=year=${YEAR},month=${MONTH},day=${7_DAYS_BEFORE}
+ -- target=do_nothing
+ ```
+
+  - `partition`: the partition to drop; in this example the partition files of `day=${7_DAYS_BEFORE}`, i.e. 7 days ago, are deleted
+  - `tableNamePrefix`: the prefix of the tables to drop
+
+- `DailyJobsSummaryReportTransformer`: mainly used to send an email with a `dailyReportSummary` csv attachment covering all jobs of the day at step granularity; for failed jobs it also aggregates the failure details and the failing step. It relies mainly on the `step_log` and `job_log` tables, and `projectName` and `jobName` need to be configured in the properties file (otherwise the `dailyReport` csv has no `projectName`)
+
+ ```sql
+ -- step=1
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.DailyJobsSummaryReportTransformer
+ -- methodName=transform
+ -- transformerType=object
+ -- datasource=hive,yellowbrick
+ -- target=do_nothing
+ -- checkPoint=false
+ -- dateRangeInterval=0
+ ```
+
+  - `datasource`: the database types written to, used to aggregate the detailed `errorMessage`
+
+- `CheckAllConnectorStatusTransformer`: integrates with the `kafka restapi`; it calls the api to get the running status of every `connector` and raises an alert promptly if any `connector` is in `failed` or `paused` state
+
+ ```sql
+ -- step=1
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.CheckAllConnectorStatusTransformer
+ -- methodName=transform
+ -- transformerType=object
+ -- uri=https://xxx.com:28085
+ -- target=do_nothing
+ ```
+
+  - `uri`: the host and port of the `kafka restapi`; the actual uri to call is assembled in code
+
+- `CheckConnectorStatusTransformer`: like `CheckAllConnectorStatusTransformer`, used for monitoring, but the `connector` names must be given via the `connectorName` parameter; one or more `connector`s can be passed at a time. It is typically used as step 1 of a kafka downstream job: first check whether the `connector`s work properly, and if a `connector` reports an error the downstream steps are not executed. The `kafka.restapi` parameter must be configured in the properties file to build the actual api
+
+ ```sql
+ -- step=1
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.CheckConnectorStatusTransformer
+ -- methodName=transform
+ -- transformerType=object
+ -- connectorName=connector1, connector2
+ -- target=do_nothing
+ ```
+
+  - `connectorName`: the names of the `connector`s to monitor, usually the `source connector` and `sink connector` used when ingesting the table
+
+- `EnsureSinkConnectorFinished`: confirms whether the `sink connector` (i.e. the `consumer` side) has consumed all data and written it all to hive/hdfs. It usually follows `CheckConnectorStatusTransformer` as step 2; if the `consumer` has not finished writing, it waits 5 minutes, and if writing is still not finished after that, the downstream steps are not executed
+
+ ```sql
+ -- step=2
+ -- source=transformation
+ -- className=com.github.sharpdata.sharpetl.spark.transformation.EnsureSinkConnectorFinished
+ -- methodName=transform
+ -- transformerType=object
+ -- group=consumer-group1
+ -- kafkaTopic=topic1
+ -- target=do_nothing
+ ```
+
+  - `group`: the `kafka consumer group` name; usually one `group` corresponds to one `topic`. If a `consumer group` covers multiple `topic`s, the `kafkaTopic` name must be specified
+  - `kafkaTopic`: the `topic` name; if not specified, by default it checks whether all `message`s in every `topic` of the `group` have been consumed
+
+
+
+### How to write your own transformer
+
+You can write a custom `transformer` in the `com.github.sharpdata.sharpetl.spark.transformation` package by overriding the `transform` method to implement the concrete logic. A `transformer` is invoked mainly via reflection; you only need to specify the `transformer` name and the corresponding parameters in the sql script, as sketched below.
+
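+Once such a class is on the classpath, a step invokes it by class name, following the same pattern as the examples above. A minimal sketch (`MyTransformer` and `myParam` are hypothetical names):
+
+```sql
+-- step=1
+-- source=transformation
+-- className=com.github.sharpdata.sharpetl.spark.transformation.MyTransformer
+-- methodName=transform
+-- transformerType=object
+-- myParam=some_value
+-- target=do_nothing
+```
+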
+### Loading external transformers
+
+The framework also supports dynamically loading scala script files. An example:
+
+```scala
+import com.fasterxml.jackson.annotation.JsonInclude.Include
+import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
+import com.fasterxml.jackson.module.scala.DefaultScalaModule
+import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
+import com.jayway.jsonpath.{JsonPath, PathNotFoundException}
+import com.github.sharpdata.sharpetl.core.util.ETLLogger
+import com.github.sharpdata.sharpetl.spark.common.ETLSparkSession
+import com.github.sharpdata.sharpetl.spark.transformation._
+import com.github.sharpdata.sharpetl.spark.utils.HttpStatusUtils
+import net.minidev.json.JSONArray
+import org.apache.http.impl.client._
+import org.apache.http.util.EntityUtils
+import org.apache.spark.sql.DataFrame
+import org.apache.spark.sql.functions._
+import org.apache.spark.sql.types.{StringType, StructField, StructType}
+
+object LoopHttpTransformer extends Transformer {
+
+ val mapper = new ObjectMapper with ScalaObjectMapper
+ mapper.setSerializationInclusion(Include.NON_ABSENT)
+ mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
+ mapper.registerModule(DefaultScalaModule)
+
+ override def transform(args: scala.collection.mutable.Map[String, String]): DataFrame = {
+    ??? // implement the concrete logic here and return a DataFrame
+ }
+}
+```
+
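+Assuming the script above is saved as a file named `LoopHttpTransformer` in the workflow path, a step could reference it by file name. A minimal sketch (see the pro tips below for the constraints on such scripts):
+
+```sql
+-- step=1
+-- source=transformation
+-- className=LoopHttpTransformer
+-- methodName=transform
+-- transformerType=dynamic_object
+-- target=do_nothing
+```
+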
+### Pro tips
+
+:::tip
+A few points to keep in mind when implementing a transformer as a scala script:
+
+* the file must not start with a package declaration; when referenced from sql, the scala script is located by the fileName part of package.fileName
+* sql referencing a scala script must set `transformerType=dynamic_object`; apart from that, usage is the same as for transformers in a jar
+* some packages must be referenced by their full name, e.g. `scala.collection.mutable.Map[String, String]` rather than `mutable.Map[String, String]`
+* if you hit an error like `illegal cyclic reference involving object InterfaceAudience`, you need the spark-submit options `--conf "spark.executor.userClassPathFirst=true" --conf "spark.driver.userClassPathFirst=true"`
+* if you hit the error `object x is not a member of package x`, use a fully qualified reference such as `scala.collection.mutable.Map[String, String]`
+:::
diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js
new file mode 100644
index 0000000..a38e884
--- /dev/null
+++ b/website/docusaurus.config.js
@@ -0,0 +1,139 @@
+// @ts-check
+// Note: type annotations allow type checking and IDEs autocompletion
+
+/** @type {import('@docusaurus/types').Config} */
+const config = {
+ title: 'Sharp ETL website',
+ tagline: 'Write ETL using your favorite SQL dialects',
+  url: 'https://sharpdata.github.io',
+ baseUrl: '/SharpETL/',
+ onBrokenLinks: 'ignore',
+ onBrokenMarkdownLinks: 'warn',
+ favicon: 'img/favicon.ico',
+
+ organizationName: 'SharpData',
+ projectName: 'SharpETL',
+
+ presets: [
+ [
+ 'classic',
+ /** @type {import('@docusaurus/preset-classic').Options} */
+ ({
+ docs: {
+ sidebarPath: require.resolve('./sidebars.js'),
+ editUrl:
+ 'https://github.com/SharpData/SharpETL/tree/pages/website',
+ },
+ blog: {
+ showReadingTime: true,
+ editUrl:
+ 'https://github.com/SharpData/SharpETL/tree/pages/website',
+ },
+ theme: {
+ customCss: require.resolve('./src/css/custom.css'),
+ },
+ gtag: {
+ trackingID: 'G-59696D1TKP',
+ anonymizeIP: false
+ },
+ }),
+ ],
+ ],
+
+ themeConfig:
+ /** @type {import('@docusaurus/preset-classic').ThemeConfig} */
+ {
+ navbar: {
+ title: 'Sharp ETL',
+ logo: {
+ alt: 'Sharp ETL Logo',
+ src: 'img/sharp_etl.png',
+ },
+ items: [
+ {
+ type: 'doc',
+ docId: 'quick-start-guide',
+ position: 'left',
+ label: 'Docs',
+ },
+ { to: 'blog', label: 'Blog', position: 'left' },
+ // { to: 'concept', label: 'Concept', position: 'left' },
+ {
+ href: 'https://github.com/SharpData/SharpETL',
+ label: 'GitHub',
+ position: 'right',
+ },
+ ],
+ },
+ footer: {
+ style: 'dark',
+ links: [
+ {
+ title: 'Learn',
+ items: [
+ {
+ label: 'Style Guide',
+ to: 'docs/',
+ },
+ {
+ label: 'Documents',
+ to: 'docs/',
+ },
+ ],
+ },
+ {
+ title: 'More',
+ items: [
+ {
+ label: 'Blog',
+ to: 'blog',
+ },
+ {
+ label: 'GitHub',
+ href: 'https://github.com/SharpData/SharpETL',
+ },
+ ],
+ },
+ ],
+ // logo: {
+ // alt: 'Sharp ETL Logo',
+ // src: '/img/sharp_etl.png',
+ // href: 'https://github.com/SharpData/SharpETL',
+ // width: 128,
+ // height: 128,
+ // },
+ copyright: `Copyright © ${new Date().getFullYear()} Sharp Data`,
+ },
+ prism: {
+ theme: require('prism-react-renderer/themes/dracula'),
+ additionalLanguages: ['java', 'scala', 'sql'],
+ },
+ announcementBar: {
+ id: 'announcementBar-2',
+ content:
+      '⭐️ If you like Sharp ETL, give it a star on GitHub! ⭐',
+ },
+ algolia: {
+ // The application ID provided by Algolia
+ appId: 'LC78S7LHSQ',
+
+ // Public API key: it is safe to commit it
+ apiKey: '0ebe9ea80f7f9aac056d184401394f79',
+
+ indexName: 'sharpetl',
+
+ // Optional: see doc section below
+ contextualSearch: true,
+
+ // Optional: Algolia search parameters
+ searchParameters: {},
+
+ // Optional: path for search page that enabled by default (`false` to disable it)
+ searchPagePath: 'search',
+
+ //... other Algolia params
+ },
+ },
+};
+
+module.exports = config;
diff --git a/website/package.json b/website/package.json
new file mode 100644
index 0000000..9c9d7a3
--- /dev/null
+++ b/website/package.json
@@ -0,0 +1,58 @@
+{
+ "name": "sharp-etl-site",
+ "version": "0.0.0",
+ "private": true,
+ "scripts": {
+ "docusaurus": "docusaurus",
+ "start": "docusaurus start",
+ "build": "docusaurus build",
+ "swizzle": "docusaurus swizzle",
+ "deploy": "docusaurus deploy",
+ "clear": "docusaurus clear",
+ "serve": "docusaurus serve",
+ "write-translations": "docusaurus write-translations",
+ "write-heading-ids": "docusaurus write-heading-ids",
+ "ci": "yarn lint && yarn format:diff",
+ "lint": "eslint --cache \"**/*.js\" && stylelint \"**/*.css\"",
+ "format": "prettier --config .prettierrc --write \"**/*.{js,jsx,ts,tsx,md,mdx}\"",
+ "format:diff": "prettier --config .prettierrc --list-different \"**/*.{js,jsx,ts,tsx,md,mdx}\""
+ },
+ "dependencies": {
+ "@docusaurus/core": "2.0.1",
+ "@docusaurus/preset-classic": "2.0.1",
+ "@docusaurus/theme-search-algolia": "^2.0.1",
+ "@mdx-js/react": "^1.6.22",
+ "clsx": "^1.2.1",
+ "prism-react-renderer": "^1.2.1",
+ "react": "^17.0.2",
+ "react-dom": "^17.0.2"
+ },
+ "devDependencies": {
+ "@babel/eslint-parser": "^7.18.2",
+ "eslint": "^8.19.0",
+ "eslint-config-airbnb": "^19.0.4",
+ "eslint-config-prettier": "^8.5.0",
+ "eslint-plugin-header": "^3.1.1",
+ "eslint-plugin-import": "^2.26.0",
+ "eslint-plugin-jsx-a11y": "^6.6.0",
+ "eslint-plugin-react": "^7.30.1",
+ "eslint-plugin-react-hooks": "^4.6.0",
+ "prettier": "^2.7.1",
+ "stylelint": "^14.9.1"
+ },
+ "browserslist": {
+ "production": [
+ ">0.5%",
+ "not dead",
+ "not op_mini all"
+ ],
+ "development": [
+ "last 1 chrome version",
+ "last 1 firefox version",
+ "last 1 safari version"
+ ]
+ },
+ "engines": {
+ "node": ">=16.14"
+ }
+}
diff --git a/website/sidebars.js b/website/sidebars.js
new file mode 100644
index 0000000..cdfc3da
--- /dev/null
+++ b/website/sidebars.js
@@ -0,0 +1,63 @@
+
+
+module.exports = {
+ // concept: [{ type: 'autogenerated', dirName: '.' }],
+ docs: [
+ {
+ type: 'category',
+ label: 'Quick Start',
+ collapsed: false,
+ items: [
+ 'quick-start-guide',
+ 'docker-setup',
+ 'developer-setup',
+ 'end-to-end-showcase',
+ 'end-to-end-showcase-postgres'
+ ],
+ }, {
+ type: 'category',
+ label: 'Configuration',
+ collapsed: false,
+ items: [
+ 'properties-file-config',
+ 'encrypt-in-properties-file',
+ 'datasource',
+ ],
+ }, {
+ type: 'category',
+ label: 'Excel Template',
+ collapsed: false,
+ items: [
+ 'ods-config-template',
+ 'excel-template-ods',
+ 'dwd-config-template',
+ 'excel-template-dwd',
+ ],
+ }, {
+ type: 'category',
+ label: 'Command',
+ collapsed: false,
+ items: [
+ 'single-job-guide',
+ 'batch-job-guide',
+ ],
+ }, {
+ type: 'category',
+ label: 'Extension',
+ collapsed: false,
+ items: [
+ 'transformer-guide',
+ 'UDF-guide',
+ 'custom-datasource-guide',
+ ],
+ }, {
+ type: 'category',
+ label: 'Security',
+ collapsed: false,
+ items: [
+ 'ranger-integrate',
+ ],
+ }
+ ],
+};
+
diff --git a/website/src/css/custom.css b/website/src/css/custom.css
new file mode 100644
index 0000000..c665f35
--- /dev/null
+++ b/website/src/css/custom.css
@@ -0,0 +1,198 @@
+ :root {
+ /*
+ See css var + hsl color palette technique:
+ https://blog.maximeheckel.com/posts/the-power-of-composition-with-css-variables/
+ */
+ --site-primary-hue-saturation: 167 68%;
+ --site-primary-hue-saturation-light: 167 56%;
+ /* do we really need this extra one? */
+ --site-color-favorite-background: #f6fdfd;
+ --site-color-tooltip: #fff;
+ --site-color-tooltip-background: #353738;
+ --site-color-svg-icon-favorite: #e9669e;
+ --site-color-checkbox-checked-bg: hsl(167deg 56% 73% / 25%);
+ --site-color-feedback-background: #fff;
+ --docusaurus-highlighted-code-line-bg: rgb(0 0 0 / 10%);
+ /* Use a darker color to ensure contrast, ideally we don't need important */
+ --ifm-breadcrumb-color-active: var(--ifm-color-primary-darker) !important;
+ --ifm-menu-color-active: var(--ifm-color-primary-darker) !important;
+ }
+
+ html[data-theme='dark'] {
+ --site-color-feedback-background: #f0f8ff;
+ --site-color-favorite-background: #1d1e1e;
+ --site-color-checkbox-checked-bg: hsl(167deg 56% 73% / 10%);
+ --docusaurus-highlighted-code-line-bg: rgb(66 66 66 / 35%);
+ }
+
+ /*
+ * This selector will be dynamically replaced by the color generator. Don't put
+ * other properties here.
+ */
+ [data-theme='light'] {
+ --ifm-color-primary: hsl(var(--site-primary-hue-saturation) 30%);
+ --ifm-color-primary-dark: hsl(var(--site-primary-hue-saturation) 26%);
+ --ifm-color-primary-darker: hsl(var(--site-primary-hue-saturation) 23%);
+ --ifm-color-primary-darkest: hsl(var(--site-primary-hue-saturation) 17%);
+
+ --ifm-color-primary-light: hsl(var(--site-primary-hue-saturation-light) 39%);
+ --ifm-color-primary-lighter: hsl(var(--site-primary-hue-saturation-light) 47%);
+ --ifm-color-primary-lightest: hsl(var(--site-primary-hue-saturation-light) 58%);
+ }
+
+ /*
+ * This selector will be dynamically replaced by the color generator. Don't put
+ * other properties here.
+ */
+ [data-theme='dark'] {
+ --ifm-color-primary: hsl(var(--site-primary-hue-saturation) 45%);
+ --ifm-color-primary-dark: hsl(var(--site-primary-hue-saturation) 41%);
+ --ifm-color-primary-darker: hsl(var(--site-primary-hue-saturation) 38%);
+ --ifm-color-primary-darkest: hsl(var(--site-primary-hue-saturation) 32%);
+
+ --ifm-color-primary-light: hsl(var(--site-primary-hue-saturation-light) 54%);
+ --ifm-color-primary-lighter: hsl(var(--site-primary-hue-saturation-light) 62%);
+ --ifm-color-primary-lightest: hsl(var(--site-primary-hue-saturation-light) 73%);
+ }
+
+ .header-github-link:hover {
+ opacity: 0.6;
+ }
+
+ .header-github-link::before {
+ content: '';
+ width: 24px;
+ height: 24px;
+ display: flex;
+ background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") no-repeat;
+ }
+
+ [data-theme='dark'] .header-github-link::before {
+ background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath fill='white' d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") no-repeat;
+ }
+
+ .footer--dark {
+ --ifm-footer-background-color: #2b3137;
+ }
+
+ .unique-tabs .tabs__item {
+ line-height: 16px;
+ margin-right: 8px;
+ }
+
+ .unique-tabs .tabs__item--active {
+ border: 0;
+ color: #fff;
+ border-radius: var(--ifm-global-radius);
+ background-color: var(--ifm-tabs-color-active);
+ }
+
+ [data-theme='light'] .themedDocusaurus [fill='#FFFF50'] {
+ fill: greenyellow;
+ }
+
+ [data-theme='dark'] .themedDocusaurus [fill='#FFFF50'] {
+ fill: seagreen;
+ }
+
+ [data-theme='light'] .DocSearch {
+ /* --docsearch-primary-color: var(--ifm-color-primary); */
+ /* --docsearch-text-color: var(--ifm-font-color-base); */
+ --docsearch-muted-color: var(--ifm-color-emphasis-700);
+ --docsearch-container-background: rgb(94 100 112 / 70%);
+ /* Modal */
+ --docsearch-modal-background: var(--ifm-color-secondary-lighter);
+ /* Search box */
+ --docsearch-searchbox-background: var(--ifm-color-secondary);
+ --docsearch-searchbox-focus-background: var(--ifm-color-white);
+ /* Hit */
+ --docsearch-hit-color: var(--ifm-font-color-base);
+ --docsearch-hit-active-color: var(--ifm-color-white);
+ --docsearch-hit-background: var(--ifm-color-white);
+ /* Footer */
+ --docsearch-footer-background: var(--ifm-color-white);
+ }
+
+ [data-theme='dark'] .DocSearch {
+ --docsearch-text-color: var(--ifm-font-color-base);
+ --docsearch-muted-color: var(--ifm-color-secondary-darkest);
+ --docsearch-container-background: rgb(47 55 69 / 70%);
+ /* Modal */
+ --docsearch-modal-background: var(--ifm-background-color);
+ /* Search box */
+ --docsearch-searchbox-background: var(--ifm-background-color);
+ --docsearch-searchbox-focus-background: var(--ifm-color-black);
+ /* Hit */
+ --docsearch-hit-color: var(--ifm-font-color-base);
+ --docsearch-hit-active-color: var(--ifm-color-white);
+ --docsearch-hit-background: var(--ifm-color-emphasis-100);
+ /* Footer */
+ --docsearch-footer-background: var(--ifm-background-surface-color);
+ --docsearch-key-gradient: linear-gradient(-26.5deg,
+ var(--ifm-color-emphasis-200) 0%,
+ var(--ifm-color-emphasis-100) 100%);
+ }
+
+ div[class^='announcementBar_'] {
+ --site-announcement-bar-stripe-color1: hsl(var(--site-primary-hue-saturation) 85%);
+ --site-announcement-bar-stripe-color2: hsl(var(--site-primary-hue-saturation) 95%);
+ background: repeating-linear-gradient(35deg,
+ var(--site-announcement-bar-stripe-color1),
+ var(--site-announcement-bar-stripe-color1) 20px,
+ var(--site-announcement-bar-stripe-color2) 10px,
+ var(--site-announcement-bar-stripe-color2) 40px);
+ font-weight: bold;
+ }
+
+ .screen-reader-only {
+ border: 0;
+ clip: rect(0 0 0 0);
+ clip-path: polygon(0 0, 0 0, 0 0);
+ height: 1px;
+ margin: -1px;
+ overflow: hidden;
+ padding: 0;
+ position: absolute;
+ width: 1px;
+ white-space: nowrap;
+ }
+
+ [data-theme='light'] img[src$='#gh-dark-mode-only'],
+ [data-theme='dark'] img[src$='#gh-light-mode-only'] {
+ display: none;
+ }
+
+ /* Used to test CSS insertion order */
+ .test-marker-site-custom-css-unique-rule {
+ content: 'site-custom-css-unique-rule';
+ }
+
+ .video-container {
+ position: relative;
+ overflow: hidden;
+ width: 100%;
+ max-width: 560px;
+ margin: 0 auto;
+ }
+
+ .yt-lite>.lty-playbtn {
+ cursor: pointer;
+ border: 0;
+ }
+
+ .dropdown-separator {
+ margin: 0.3rem 0;
+ }
+
+ .dropdown-archived-versions {
+ font-size: 0.875rem;
+ padding: 0.2rem 0.5rem;
+ }
+
+ .code-block-error-line {
+ background-color: #ff000020;
+ display: block;
+ margin: 0 calc(-1 * var(--ifm-pre-padding));
+ padding: 0 var(--ifm-pre-padding);
+ border-left: 3px solid #ff000080;
+ }
\ No newline at end of file
diff --git a/website/src/pages/index.js b/website/src/pages/index.js
new file mode 100644
index 0000000..4272434
--- /dev/null
+++ b/website/src/pages/index.js
@@ -0,0 +1,100 @@
+
+import React from 'react';
+import clsx from 'clsx';
+import Layout from '@theme/Layout';
+import Link from '@docusaurus/Link';
+import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
+import useBaseUrl from '@docusaurus/useBaseUrl';
+import styles from './styles.module.css';
+
+const features = [
+ {
+ title: 'Easy to Use',
+ imageUrl: 'img/undraw_docusaurus_mountain.svg',
+ description: (
+ <>
+ Docusaurus was designed from the ground up to be easily installed and
+ used to get your website up and running quickly.
+      </>
+ ),
+ },
+ {
+ title: 'Focus on What Matters',
+ imageUrl: 'img/undraw_docusaurus_tree.svg',
+ description: (
+ <>
+ Docusaurus lets you focus on your docs, and we'll do the chores. Go
+ ahead and move your docs into the docs directory.
+      </>
+ ),
+ },
+ {
+ title: 'Powered by SQL',
+ imageUrl: 'img/undraw_docusaurus_react.svg',
+ description: (
+ <>
+ Write ETL using your favorite SQL dialects, no custom SQL syntax at all.
+      </>
+ ),
+ },
+];
+
+function Feature({imageUrl, title, description}) {
+  const imgUrl = useBaseUrl(imageUrl);
+  return (
+    <div className={clsx('col col--4', styles.feature)}>
+      {imgUrl && (
+        <div className="text--center">
+          <img className={styles.featureImage} src={imgUrl} alt={title} />
+        </div>
+      )}
+      <h3>{title}</h3>
+      <p>{description}</p>
+    </div>
+  );
+}
+
+export default function Home() {
+  const context = useDocusaurusContext();
+  const {siteConfig = {}} = context;
+  return (
+    <Layout
+      title={siteConfig.title}
+      description={siteConfig.tagline}>
+      <header className={clsx('hero hero--primary', styles.heroBanner)}>
+        <div className="container">
+          <h1 className="hero__title">{siteConfig.title}</h1>
+          <p className="hero__subtitle">{siteConfig.tagline}</p>
+          <div className={styles.buttons}>
+            <Link
+              className="button button--outline button--secondary button--lg"
+              to={useBaseUrl('docs/quick-start-guide')}>
+              Get Started
+            </Link>
+          </div>
+        </div>
+      </header>
+      <main>
+        {features && features.length > 0 && (
+          <section className={styles.features}>
+            <div className="container">
+              <div className="row">
+                {features.map(({title, imageUrl, description}) => (
+                  <Feature key={title} title={title} imageUrl={imageUrl} description={description} />
+                ))}
+              </div>
+            </div>
+          </section>
+        )}
+      </main>
+    </Layout>
+  );
+}
diff --git a/website/src/pages/markdown-page.md b/website/src/pages/markdown-page.md
new file mode 100644
index 0000000..9756c5b
--- /dev/null
+++ b/website/src/pages/markdown-page.md
@@ -0,0 +1,7 @@
+---
+title: Markdown page example
+---
+
+# Markdown page example
+
+You don't need React to write simple standalone pages.
diff --git a/website/src/pages/styles.module.css b/website/src/pages/styles.module.css
new file mode 100644
index 0000000..0d48549
--- /dev/null
+++ b/website/src/pages/styles.module.css
@@ -0,0 +1,36 @@
+
+/**
+ * CSS files with the .module.css suffix will be treated as CSS modules
+ * and scoped locally.
+ */
+
+.heroBanner {
+ padding: 4rem 0;
+ text-align: center;
+ position: relative;
+ overflow: hidden;
+}
+
+@media screen and (max-width: 996px) {
+ .heroBanner {
+ padding: 2rem;
+ }
+}
+
+.buttons {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+}
+
+.features {
+ display: flex;
+ align-items: center;
+ padding: 2rem 0;
+ width: 100%;
+}
+
+.featureImage {
+ height: 200px;
+ width: 200px;
+}
diff --git a/website/static/.nojekyll b/website/static/.nojekyll
new file mode 100644
index 0000000..e69de29
diff --git a/website/static/assets/images/github.png b/website/static/assets/images/github.png
new file mode 100644
index 0000000..db85527
Binary files /dev/null and b/website/static/assets/images/github.png differ
diff --git a/website/static/assets/images/logdriven-1.svg b/website/static/assets/images/logdriven-1.svg
new file mode 100644
index 0000000..154c009
--- /dev/null
+++ b/website/static/assets/images/logdriven-1.svg
@@ -0,0 +1,3 @@
+
+
+Success Success Success Success Success 理想情况 Success Success Success Success 现实情况 2022 02 01 2022 02 02 2022 02 03 2022 02 22 2022 02 23 2022 02 01 2022 02 02 2022 02 03 2022 02 22 2022 02 23 Success... 补数据 2022 02 01 2022 02 02 2022 02 03 2022 02 22 2022 02 23 Text is not SVG - cannot display
\ No newline at end of file
diff --git a/website/static/assets/images/logdriven-2.svg b/website/static/assets/images/logdriven-2.svg
new file mode 100644
index 0000000..2ec2566
--- /dev/null
+++ b/website/static/assets/images/logdriven-2.svg
@@ -0,0 +1,3 @@
+
+
+Success Success Success 日志驱动
(如果重试可成功) 2022 02 01 2022 02 02 2022 02 03 2022 02 22 2022 02 23 Success... Success... Success 日志驱动
(如果重试不成功) 2022 02 01 2022 02 02 2022 02 03 2022 02 22 2022 02 23 2022 02 02 2022 02 02 2022 02 02 2022 02 02 Success 日志驱动
(人工介入修复后) 2022 02 01 2022 02 02 2022 02 03 2022 02 22 2022 02 23 2022 02 02 2022 02 02 Success Success... Success... Text is not SVG - cannot display
\ No newline at end of file
diff --git a/website/static/assets/images/logdriven-3.svg b/website/static/assets/images/logdriven-3.svg
new file mode 100644
index 0000000..ce51fe2
--- /dev/null
+++ b/website/static/assets/images/logdriven-3.svg
@@ -0,0 +1,3 @@
+
+
+Job 开始 确定dataRangeStart 有曾经成功的任务 取--default-start 否 取最大dataRangeEnd 是 dataRangeStart 计算待执行任务队列(以time-based为例) schedule的次数 = (‘当前时间’ - `dataRangeStart`) / `period` (取整)
schedule的次数 = (‘当前时间’ - `dataRangeStart`) / `p... > 0 执行任务 写入job_log, 任务状态运行中 根据SQL的step依次执行 更新step_log 更新job_log状态,成功或失败 执行待执行队列中的下一个任务 Text is not SVG - cannot display
\ No newline at end of file
diff --git a/website/static/assets/images/logdriven.png b/website/static/assets/images/logdriven.png
new file mode 100644
index 0000000..bb247b4
Binary files /dev/null and b/website/static/assets/images/logdriven.png differ
diff --git a/website/static/img/favicon.ico b/website/static/img/favicon.ico
new file mode 100644
index 0000000..d3d2520
Binary files /dev/null and b/website/static/img/favicon.ico differ
diff --git a/website/static/img/sharp_etl.png b/website/static/img/sharp_etl.png
new file mode 100644
index 0000000..e5654c6
Binary files /dev/null and b/website/static/img/sharp_etl.png differ
diff --git a/website/static/img/undraw_docusaurus_mountain.svg b/website/static/img/undraw_docusaurus_mountain.svg
new file mode 100644
index 0000000..af961c4
--- /dev/null
+++ b/website/static/img/undraw_docusaurus_mountain.svg
@@ -0,0 +1,171 @@
+
+ Easy to Use
diff --git a/website/static/img/undraw_docusaurus_react.svg b/website/static/img/undraw_docusaurus_react.svg
new file mode 100644
index 0000000..94b5cf0
--- /dev/null
+++ b/website/static/img/undraw_docusaurus_react.svg
@@ -0,0 +1,170 @@
+
+ Powered by React
diff --git a/website/static/img/undraw_docusaurus_tree.svg b/website/static/img/undraw_docusaurus_tree.svg
new file mode 100644
index 0000000..d9161d3
--- /dev/null
+++ b/website/static/img/undraw_docusaurus_tree.svg
@@ -0,0 +1,40 @@
+
+ Focus on What Matters
diff --git a/website/yarn.lock b/website/yarn.lock
new file mode 100644
index 0000000..372ba62
--- /dev/null
+++ b/website/yarn.lock
@@ -0,0 +1,8674 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"@algolia/autocomplete-core@1.7.1":
+ version "1.7.1"
+ resolved "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.7.1.tgz#025538b8a9564a9f3dd5bcf8a236d6951c76c7d1"
+ integrity sha512-eiZw+fxMzNQn01S8dA/hcCpoWCOCwcIIEUtHHdzN5TGB3IpzLbuhqFeTfh2OUhhgkE8Uo17+wH+QJ/wYyQmmzg==
+ dependencies:
+ "@algolia/autocomplete-shared" "1.7.1"
+
+"@algolia/autocomplete-preset-algolia@1.7.1":
+ version "1.7.1"
+ resolved "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.1.tgz#7dadc5607097766478014ae2e9e1c9c4b3f957c8"
+ integrity sha512-pJwmIxeJCymU1M6cGujnaIYcY3QPOVYZOXhFkWVM7IxKzy272BwCvMFMyc5NpG/QmiObBxjo7myd060OeTNJXg==
+ dependencies:
+ "@algolia/autocomplete-shared" "1.7.1"
+
+"@algolia/autocomplete-shared@1.7.1":
+ version "1.7.1"
+ resolved "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.1.tgz#95c3a0b4b78858fed730cf9c755b7d1cd0c82c74"
+ integrity sha512-eTmGVqY3GeyBTT8IWiB2K5EuURAqhnumfktAEoHxfDY2o7vg2rSnO16ZtIG0fMgt3py28Vwgq42/bVEuaQV7pg==
+
+"@algolia/cache-browser-local-storage@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.14.1.tgz#a0b85a6c3fe3d5c49fca01b16f00b41bf38a918c"
+ integrity sha512-BBdibsPn3hLBajc/NRAtHEeoXsw+ziSGR/3bqRNB5puUuwKPQZSE2MaMVWSADnlc3KV3bEj4xsfKOVLJyfJSPQ==
+ dependencies:
+ "@algolia/cache-common" "4.14.1"
+
+"@algolia/cache-common@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.14.1.tgz#11d44a6442f83deb3629a04c20df8408088f6449"
+ integrity sha512-XhAzm0Sm3D3DuOWUyDoVSXZ/RjYMvI1rbki+QH4ODAVaHDWVhMhg3IJPv3gIbBQnEQdtPdBhsf2hyPxAu28E5w==
+
+"@algolia/cache-in-memory@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.14.1.tgz#68ede8520f054bc65938209b59962056ae5b56c7"
+ integrity sha512-fVUu7N1hYb/zZYfV9Krlij70NwS+8bQm5vmDJyfp0+9FjSjz2V7wj1CUxvaY8ZcgoBPj9ehQ8sRuqSM2m5OPww==
+ dependencies:
+ "@algolia/cache-common" "4.14.1"
+
+"@algolia/client-account@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.14.1.tgz#b92e091f698630c49ec7df48816ae75af3cbac40"
+ integrity sha512-Zm4+PN3bsBPhv1dKKwzBaRGzf0G1JcjjSTpE231L7Z7LsEDcFDW4E6L5ctwMz3SliSBeL/j1ghmaunJrZlkRIg==
+ dependencies:
+ "@algolia/client-common" "4.14.1"
+ "@algolia/client-search" "4.14.1"
+ "@algolia/transporter" "4.14.1"
+
+"@algolia/client-analytics@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.14.1.tgz#aca3436775f608a6141cc81899e1d75ef030efa2"
+ integrity sha512-EhZLR0ezBZx7ZGkwzj7OTvnI8j2Alyv1ByC0Mx48qh3KqRhVwMFm/Uf34zAv4Dum2PTFin41Y4smAvAypth9nQ==
+ dependencies:
+ "@algolia/client-common" "4.14.1"
+ "@algolia/client-search" "4.14.1"
+ "@algolia/requester-common" "4.14.1"
+ "@algolia/transporter" "4.14.1"
+
+"@algolia/client-common@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.14.1.tgz#2709bddf934a3545cd9feecc0591e9285fbed7c2"
+ integrity sha512-WDwziD7Rt1yCRDfONmeLOfh1Lt8uOy6Vn7dma171KOH9NN3q8yDQpOhPqdFOCz1j3GC1FfIZxaC0YEOIobZ2lg==
+ dependencies:
+ "@algolia/requester-common" "4.14.1"
+ "@algolia/transporter" "4.14.1"
+
+"@algolia/client-personalization@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.14.1.tgz#58f0b85b8f6d531e13877a099f54513ac2bec154"
+ integrity sha512-D4eeW7bTi769PWcEYZO+QiKuUXFOC5zK5Iy83Ey6FHqS7m5TXws5MP1rmETE018lTXeYq2NSHWp/F07fRRg0RA==
+ dependencies:
+ "@algolia/client-common" "4.14.1"
+ "@algolia/requester-common" "4.14.1"
+ "@algolia/transporter" "4.14.1"
+
+"@algolia/client-search@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.14.1.tgz#44bfc65b3e6939b725f8f97aad725593f2a4ad7f"
+ integrity sha512-K6XrdIIQq8a3o+kCedj5slUVzA1aKttae4mLzwnY0bS7tYduv1IQggi9Sz8gOG6/MMyKMB4IwYqr47t/0z4Vxw==
+ dependencies:
+ "@algolia/client-common" "4.14.1"
+ "@algolia/requester-common" "4.14.1"
+ "@algolia/transporter" "4.14.1"
+
+"@algolia/events@^4.0.1":
+ version "4.0.1"
+ resolved "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz#fd39e7477e7bc703d7f893b556f676c032af3950"
+ integrity sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==
+
+"@algolia/logger-common@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.14.1.tgz#acbd36b66e3b408f99cacfb581ad3bd28defcc28"
+ integrity sha512-58CK87wTjUWI1QNXc3nFDQ7EXBi28NoLufXE9sMjng2fAL1wPdyO+KFD8KTBoXOZnJWflPj5F7p6jLyGAfgvcQ==
+
+"@algolia/logger-console@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.14.1.tgz#7e7d7486d71ccfe38e63234626931083592149d2"
+ integrity sha512-not+VwH1Dx2B/BaN+4+4+YnGRBJ9lduNz2qbMCTxZ4yFHb+84j4ewHRPBTtEmibn7caVCPybdTKfHLQhimSBLQ==
+ dependencies:
+ "@algolia/logger-common" "4.14.1"
+
+"@algolia/requester-browser-xhr@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.14.1.tgz#9e683dc0916afae221bf946255a998b06830be78"
+ integrity sha512-mpH6QsFBbXjTy9+iU86Rcdt9LxS7GA/tWhGMr0+Ap8+4Za5+ELToz0PC7euVeVOcclgGGi7gbjOAmf6k8b10iA==
+ dependencies:
+ "@algolia/requester-common" "4.14.1"
+
+"@algolia/requester-common@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.14.1.tgz#b07ffa00ae0cf61442dcda71a3209051fed130d8"
+ integrity sha512-EbXBKrfYcX5/JJfaw7IZxhWlbUtjd5Chs+Alrfc4tutgRQn4dmImWS07n3iffwJcYdOWY1eRrnfBK5BwopuN5A==
+
+"@algolia/requester-node-http@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.14.1.tgz#5e5f4ff55deb5aa0e92f3105d77299de744b1471"
+ integrity sha512-/sbRqL9P8aVuYUG50BgpCbdJyyCS7fia+sQIx9d1DiGPO7hunwLaEyR4H7JDHc/PLKdVEPygJx3rnbJWix4Btg==
+ dependencies:
+ "@algolia/requester-common" "4.14.1"
+
+"@algolia/transporter@4.14.1":
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.14.1.tgz#7eca8568ff710d9d1a7bbd3c1dafbbf44a6143f5"
+ integrity sha512-xbmoIqszFDOCCZqizBQ2TNHcGtjZX7EkJCzABsrokA0WqtfZzClFmtc+tZYgtEiyAfIF70alTegG19poQGdkvg==
+ dependencies:
+ "@algolia/cache-common" "4.14.1"
+ "@algolia/logger-common" "4.14.1"
+ "@algolia/requester-common" "4.14.1"
+
+"@ampproject/remapping@^2.1.0":
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d"
+ integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==
+ dependencies:
+ "@jridgewell/gen-mapping" "^0.1.0"
+ "@jridgewell/trace-mapping" "^0.3.9"
+
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a"
+ integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==
+ dependencies:
+ "@babel/highlight" "^7.18.6"
+
+"@babel/compat-data@^7.13.11", "@babel/compat-data@^7.18.8":
+ version "7.18.8"
+ resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.18.8.tgz#2483f565faca607b8535590e84e7de323f27764d"
+ integrity sha512-HSmX4WZPPK3FUxYp7g2T6EyO8j96HlZJlxmKPSh6KAcqwyDrfx7hKjXpAW/0FhFfTJsR0Yt4lAjLI2coMptIHQ==
+
+"@babel/core@7.12.9":
+ version "7.12.9"
+ resolved "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz#fd450c4ec10cdbb980e2928b7aa7a28484593fc8"
+ integrity sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==
+ dependencies:
+ "@babel/code-frame" "^7.10.4"
+ "@babel/generator" "^7.12.5"
+ "@babel/helper-module-transforms" "^7.12.1"
+ "@babel/helpers" "^7.12.5"
+ "@babel/parser" "^7.12.7"
+ "@babel/template" "^7.12.7"
+ "@babel/traverse" "^7.12.9"
+ "@babel/types" "^7.12.7"
+ convert-source-map "^1.7.0"
+ debug "^4.1.0"
+ gensync "^1.0.0-beta.1"
+ json5 "^2.1.2"
+ lodash "^4.17.19"
+ resolve "^1.3.2"
+ semver "^5.4.1"
+ source-map "^0.5.0"
+
+"@babel/core@^7.18.5", "@babel/core@^7.18.6":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/core/-/core-7.18.9.tgz#805461f967c77ff46c74ca0460ccf4fe933ddd59"
+ integrity sha512-1LIb1eL8APMy91/IMW+31ckrfBM4yCoLaVzoDhZUKSM4cu1L1nIidyxkCgzPAgrC5WEz36IPEr/eSeSF9pIn+g==
+ dependencies:
+ "@ampproject/remapping" "^2.1.0"
+ "@babel/code-frame" "^7.18.6"
+ "@babel/generator" "^7.18.9"
+ "@babel/helper-compilation-targets" "^7.18.9"
+ "@babel/helper-module-transforms" "^7.18.9"
+ "@babel/helpers" "^7.18.9"
+ "@babel/parser" "^7.18.9"
+ "@babel/template" "^7.18.6"
+ "@babel/traverse" "^7.18.9"
+ "@babel/types" "^7.18.9"
+ convert-source-map "^1.7.0"
+ debug "^4.1.0"
+ gensync "^1.0.0-beta.2"
+ json5 "^2.2.1"
+ semver "^6.3.0"
+
+"@babel/eslint-parser@^7.18.2":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.18.9.tgz#255a63796819a97b7578751bb08ab9f2a375a031"
+ integrity sha512-KzSGpMBggz4fKbRbWLNyPVTuQr6cmCcBhOyXTw/fieOVaw5oYAwcAj4a7UKcDYCPxQq+CG1NCDZH9e2JTXquiQ==
+ dependencies:
+ eslint-scope "^5.1.1"
+ eslint-visitor-keys "^2.1.0"
+ semver "^6.3.0"
+
+"@babel/generator@^7.12.5", "@babel/generator@^7.18.7", "@babel/generator@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.18.9.tgz#68337e9ea8044d6ddc690fb29acae39359cca0a5"
+ integrity sha512-wt5Naw6lJrL1/SGkipMiFxJjtyczUWTP38deiP1PO60HsBjDeKk08CGC3S8iVuvf0FmTdgKwU1KIXzSKL1G0Ug==
+ dependencies:
+ "@babel/types" "^7.18.9"
+ "@jridgewell/gen-mapping" "^0.3.2"
+ jsesc "^2.5.1"
+
+"@babel/helper-annotate-as-pure@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb"
+ integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==
+ dependencies:
+ "@babel/types" "^7.18.6"
+
+"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb"
+ integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw==
+ dependencies:
+ "@babel/helper-explode-assignable-expression" "^7.18.6"
+ "@babel/types" "^7.18.9"
+
+"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.9.tgz#69e64f57b524cde3e5ff6cc5a9f4a387ee5563bf"
+ integrity sha512-tzLCyVmqUiFlcFoAPLA/gL9TeYrF61VLNtb+hvkuVaB5SUjW7jcfrglBIX1vUIoT7CLP3bBlIMeyEsIl2eFQNg==
+ dependencies:
+ "@babel/compat-data" "^7.18.8"
+ "@babel/helper-validator-option" "^7.18.6"
+ browserslist "^4.20.2"
+ semver "^6.3.0"
+
+"@babel/helper-create-class-features-plugin@^7.18.6":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.18.9.tgz#d802ee16a64a9e824fcbf0a2ffc92f19d58550ce"
+ integrity sha512-WvypNAYaVh23QcjpMR24CwZY2Nz6hqdOcFdPbNpV56hL5H6KiFheO7Xm1aPdlLQ7d5emYZX7VZwPp9x3z+2opw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.18.6"
+ "@babel/helper-environment-visitor" "^7.18.9"
+ "@babel/helper-function-name" "^7.18.9"
+ "@babel/helper-member-expression-to-functions" "^7.18.9"
+ "@babel/helper-optimise-call-expression" "^7.18.6"
+ "@babel/helper-replace-supers" "^7.18.9"
+ "@babel/helper-split-export-declaration" "^7.18.6"
+
+"@babel/helper-create-regexp-features-plugin@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.18.6.tgz#3e35f4e04acbbf25f1b3534a657610a000543d3c"
+ integrity sha512-7LcpH1wnQLGrI+4v+nPp+zUvIkF9x0ddv1Hkdue10tg3gmRnLy97DXh4STiOf1qeIInyD69Qv5kKSZzKD8B/7A==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.18.6"
+ regexpu-core "^5.1.0"
+
+"@babel/helper-define-polyfill-provider@^0.3.1":
+ version "0.3.1"
+ resolved "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz#52411b445bdb2e676869e5a74960d2d3826d2665"
+ integrity sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.13.0"
+ "@babel/helper-module-imports" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.13.0"
+ "@babel/traverse" "^7.13.0"
+ debug "^4.1.1"
+ lodash.debounce "^4.0.8"
+ resolve "^1.14.2"
+ semver "^6.1.2"
+
+"@babel/helper-environment-visitor@^7.18.6", "@babel/helper-environment-visitor@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be"
+ integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==
+
+"@babel/helper-explode-assignable-expression@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096"
+ integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==
+ dependencies:
+ "@babel/types" "^7.18.6"
+
+"@babel/helper-function-name@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.18.9.tgz#940e6084a55dee867d33b4e487da2676365e86b0"
+ integrity sha512-fJgWlZt7nxGksJS9a0XdSaI4XvpExnNIgRP+rVefWh5U7BL8pPuir6SJUmFKRfjWQ51OtWSzwOxhaH/EBWWc0A==
+ dependencies:
+ "@babel/template" "^7.18.6"
+ "@babel/types" "^7.18.9"
+
+"@babel/helper-hoist-variables@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678"
+ integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==
+ dependencies:
+ "@babel/types" "^7.18.6"
+
+"@babel/helper-member-expression-to-functions@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz#1531661e8375af843ad37ac692c132841e2fd815"
+ integrity sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg==
+ dependencies:
+ "@babel/types" "^7.18.9"
+
+"@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e"
+ integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==
+ dependencies:
+ "@babel/types" "^7.18.6"
+
+"@babel/helper-module-transforms@^7.12.1", "@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.18.9.tgz#5a1079c005135ed627442df31a42887e80fcb712"
+ integrity sha512-KYNqY0ICwfv19b31XzvmI/mfcylOzbLtowkw+mfvGPAQ3kfCnMLYbED3YecL5tPd8nAYFQFAd6JHp2LxZk/J1g==
+ dependencies:
+ "@babel/helper-environment-visitor" "^7.18.9"
+ "@babel/helper-module-imports" "^7.18.6"
+ "@babel/helper-simple-access" "^7.18.6"
+ "@babel/helper-split-export-declaration" "^7.18.6"
+ "@babel/helper-validator-identifier" "^7.18.6"
+ "@babel/template" "^7.18.6"
+ "@babel/traverse" "^7.18.9"
+ "@babel/types" "^7.18.9"
+
+"@babel/helper-optimise-call-expression@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe"
+ integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==
+ dependencies:
+ "@babel/types" "^7.18.6"
+
+"@babel/helper-plugin-utils@7.10.4":
+ version "7.10.4"
+ resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz#2f75a831269d4f677de49986dff59927533cf375"
+ integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==
+
+"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.18.9.tgz#4b8aea3b069d8cb8a72cdfe28ddf5ceca695ef2f"
+ integrity sha512-aBXPT3bmtLryXaoJLyYPXPlSD4p1ld9aYeR+sJNOZjJJGiOpb+fKfh3NkcCu7J54nUJwCERPBExCCpyCOHnu/w==
+
+"@babel/helper-remap-async-to-generator@^7.18.6":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519"
+ integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.18.6"
+ "@babel/helper-environment-visitor" "^7.18.9"
+ "@babel/helper-wrap-function" "^7.18.9"
+ "@babel/types" "^7.18.9"
+
+"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.18.9.tgz#1092e002feca980fbbb0bd4d51b74a65c6a500e6"
+ integrity sha512-dNsWibVI4lNT6HiuOIBr1oyxo40HvIVmbwPUm3XZ7wMh4k2WxrxTqZwSqw/eEmXDS9np0ey5M2bz9tBmO9c+YQ==
+ dependencies:
+ "@babel/helper-environment-visitor" "^7.18.9"
+ "@babel/helper-member-expression-to-functions" "^7.18.9"
+ "@babel/helper-optimise-call-expression" "^7.18.6"
+ "@babel/traverse" "^7.18.9"
+ "@babel/types" "^7.18.9"
+
+"@babel/helper-simple-access@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea"
+ integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==
+ dependencies:
+ "@babel/types" "^7.18.6"
+
+"@babel/helper-skip-transparent-expression-wrappers@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz#778d87b3a758d90b471e7b9918f34a9a02eb5818"
+ integrity sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw==
+ dependencies:
+ "@babel/types" "^7.18.9"
+
+"@babel/helper-split-export-declaration@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075"
+ integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==
+ dependencies:
+ "@babel/types" "^7.18.6"
+
+"@babel/helper-validator-identifier@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.18.6.tgz#9c97e30d31b2b8c72a1d08984f2ca9b574d7a076"
+ integrity sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g==
+
+"@babel/helper-validator-option@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8"
+ integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==
+
+"@babel/helper-wrap-function@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.18.9.tgz#ae1feddc6ebbaa2fd79346b77821c3bd73a39646"
+ integrity sha512-cG2ru3TRAL6a60tfQflpEfs4ldiPwF6YW3zfJiRgmoFVIaC1vGnBBgatfec+ZUziPHkHSaXAuEck3Cdkf3eRpQ==
+ dependencies:
+ "@babel/helper-function-name" "^7.18.9"
+ "@babel/template" "^7.18.6"
+ "@babel/traverse" "^7.18.9"
+ "@babel/types" "^7.18.9"
+
+"@babel/helpers@^7.12.5", "@babel/helpers@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.18.9.tgz#4bef3b893f253a1eced04516824ede94dcfe7ff9"
+ integrity sha512-Jf5a+rbrLoR4eNdUmnFu8cN5eNJT6qdTdOg5IHIzq87WwyRw9PwguLFOWYgktN/60IP4fgDUawJvs7PjQIzELQ==
+ dependencies:
+ "@babel/template" "^7.18.6"
+ "@babel/traverse" "^7.18.9"
+ "@babel/types" "^7.18.9"
+
+"@babel/highlight@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf"
+ integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==
+ dependencies:
+ "@babel/helper-validator-identifier" "^7.18.6"
+ chalk "^2.0.0"
+ js-tokens "^4.0.0"
+
+"@babel/parser@^7.12.7", "@babel/parser@^7.18.6", "@babel/parser@^7.18.8", "@babel/parser@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.18.9.tgz#f2dde0c682ccc264a9a8595efd030a5cc8fd2539"
+ integrity sha512-9uJveS9eY9DJ0t64YbIBZICtJy8a5QrDEVdiLCG97fVLpDTpGX7t8mMSb6OWw6Lrnjqj4O8zwjELX3dhoMgiBg==
+
+"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2"
+ integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz#a11af19aa373d68d561f08e0a57242350ed0ec50"
+ integrity sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9"
+ "@babel/plugin-proposal-optional-chaining" "^7.18.9"
+
+"@babel/plugin-proposal-async-generator-functions@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.18.6.tgz#aedac81e6fc12bb643374656dd5f2605bf743d17"
+ integrity sha512-WAz4R9bvozx4qwf74M+sfqPMKfSqwM0phxPTR6iJIi8robgzXwkEgmeJG1gEKhm6sDqT/U9aV3lfcqybIpev8w==
+ dependencies:
+ "@babel/helper-environment-visitor" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/helper-remap-async-to-generator" "^7.18.6"
+ "@babel/plugin-syntax-async-generators" "^7.8.4"
+
+"@babel/plugin-proposal-class-properties@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3"
+ integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-proposal-class-static-block@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz#8aa81d403ab72d3962fc06c26e222dacfc9b9020"
+ integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-class-static-block" "^7.14.5"
+
+"@babel/plugin-proposal-dynamic-import@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94"
+ integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.3"
+
+"@babel/plugin-proposal-export-namespace-from@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203"
+ integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+
+"@babel/plugin-proposal-json-strings@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b"
+ integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-json-strings" "^7.8.3"
+
+"@babel/plugin-proposal-logical-assignment-operators@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz#8148cbb350483bf6220af06fa6db3690e14b2e23"
+ integrity sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+
+"@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1"
+ integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3"
+
+"@babel/plugin-proposal-numeric-separator@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75"
+ integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+
+"@babel/plugin-proposal-object-rest-spread@7.12.1":
+ version "7.12.1"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz#def9bd03cea0f9b72283dac0ec22d289c7691069"
+ integrity sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.0"
+ "@babel/plugin-transform-parameters" "^7.12.1"
+
+"@babel/plugin-proposal-object-rest-spread@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7"
+ integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q==
+ dependencies:
+ "@babel/compat-data" "^7.18.8"
+ "@babel/helper-compilation-targets" "^7.18.9"
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.3"
+ "@babel/plugin-transform-parameters" "^7.18.8"
+
+"@babel/plugin-proposal-optional-catch-binding@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb"
+ integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.3"
+
+"@babel/plugin-proposal-optional-chaining@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz#e8e8fe0723f2563960e4bf5e9690933691915993"
+ integrity sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.3"
+
+"@babel/plugin-proposal-private-methods@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea"
+ integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-proposal-private-property-in-object@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz#a64137b232f0aca3733a67eb1a144c192389c503"
+ integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.18.6"
+ "@babel/helper-create-class-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-private-property-in-object" "^7.14.5"
+
+"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e"
+ integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-syntax-async-generators@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d"
+ integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-class-properties@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10"
+ integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-class-static-block@^7.14.5":
+ version "7.14.5"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406"
+ integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-dynamic-import@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3"
+ integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-export-namespace-from@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a"
+ integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.3"
+
+"@babel/plugin-syntax-import-assertions@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz#cd6190500a4fa2fe31990a963ffab4b63e4505e4"
+ integrity sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-syntax-json-strings@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a"
+ integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-jsx@7.12.1":
+ version "7.12.1"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz#9d9d357cc818aa7ae7935917c1257f67677a0926"
+ integrity sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-jsx@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0"
+ integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-syntax-logical-assignment-operators@^7.10.4":
+ version "7.10.4"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699"
+ integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9"
+ integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-numeric-separator@^7.10.4":
+ version "7.10.4"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97"
+ integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-object-rest-spread@7.8.3", "@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871"
+ integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-optional-catch-binding@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1"
+ integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-optional-chaining@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a"
+ integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-private-property-in-object@^7.14.5":
+ version "7.14.5"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad"
+ integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-top-level-await@^7.14.5":
+ version "7.14.5"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c"
+ integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-typescript@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285"
+ integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-arrow-functions@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz#19063fcf8771ec7b31d742339dac62433d0611fe"
+ integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-async-to-generator@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz#ccda3d1ab9d5ced5265fdb13f1882d5476c71615"
+ integrity sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag==
+ dependencies:
+ "@babel/helper-module-imports" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/helper-remap-async-to-generator" "^7.18.6"
+
+"@babel/plugin-transform-block-scoped-functions@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8"
+ integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-block-scoping@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.9.tgz#f9b7e018ac3f373c81452d6ada8bd5a18928926d"
+ integrity sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-classes@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.18.9.tgz#90818efc5b9746879b869d5ce83eb2aa48bbc3da"
+ integrity sha512-EkRQxsxoytpTlKJmSPYrsOMjCILacAjtSVkd4gChEe2kXjFCun3yohhW5I7plXJhCemM0gKsaGMcO8tinvCA5g==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.18.6"
+ "@babel/helper-environment-visitor" "^7.18.9"
+ "@babel/helper-function-name" "^7.18.9"
+ "@babel/helper-optimise-call-expression" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/helper-replace-supers" "^7.18.9"
+ "@babel/helper-split-export-declaration" "^7.18.6"
+ globals "^11.1.0"
+
+"@babel/plugin-transform-computed-properties@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz#2357a8224d402dad623caf6259b611e56aec746e"
+ integrity sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-destructuring@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.18.9.tgz#68906549c021cb231bee1db21d3b5b095f8ee292"
+ integrity sha512-p5VCYNddPLkZTq4XymQIaIfZNJwT9YsjkPOhkVEqt6QIpQFZVM9IltqqYpOEkJoN1DPznmxUDyZ5CTZs/ZCuHA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8"
+ integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-duplicate-keys@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e"
+ integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-exponentiation-operator@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd"
+ integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==
+ dependencies:
+ "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-for-of@^7.18.8":
+ version "7.18.8"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1"
+ integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-function-name@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0"
+ integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.18.9"
+ "@babel/helper-function-name" "^7.18.9"
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-literals@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc"
+ integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-member-expression-literals@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e"
+ integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-modules-amd@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz#8c91f8c5115d2202f277549848874027d7172d21"
+ integrity sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-commonjs@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz#afd243afba166cca69892e24a8fd8c9f2ca87883"
+ integrity sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/helper-simple-access" "^7.18.6"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-systemjs@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.18.9.tgz#545df284a7ac6a05125e3e405e536c5853099a06"
+ integrity sha512-zY/VSIbbqtoRoJKo2cDTewL364jSlZGvn0LKOf9ntbfxOvjfmyrdtEEOAdswOswhZEb8UH3jDkCKHd1sPgsS0A==
+ dependencies:
+ "@babel/helper-hoist-variables" "^7.18.6"
+ "@babel/helper-module-transforms" "^7.18.9"
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/helper-validator-identifier" "^7.18.6"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-umd@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9"
+ integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-named-capturing-groups-regex@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.18.6.tgz#c89bfbc7cc6805d692f3a49bc5fc1b630007246d"
+ integrity sha512-UmEOGF8XgaIqD74bC8g7iV3RYj8lMf0Bw7NJzvnS9qQhM4mg+1WHKotUIdjxgD2RGrgFLZZPCFPFj3P/kVDYhg==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-new-target@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8"
+ integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-object-super@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c"
+ integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/helper-replace-supers" "^7.18.6"
+
+"@babel/plugin-transform-parameters@^7.12.1", "@babel/plugin-transform-parameters@^7.18.8":
+ version "7.18.8"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz#ee9f1a0ce6d78af58d0956a9378ea3427cccb48a"
+ integrity sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-property-literals@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3"
+ integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-react-constant-elements@^7.17.12":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.9.tgz#ff6aeedd38f57ba6b41dcf824fcc8bcedb3e783f"
+ integrity sha512-IrTYh1I3YCEL1trjknnlLKTp5JggjzhKl/d3ibzPc97JhpFcDTr38Jdek/oX4cFbS6By0bXJcOkpRvJ5ZHK2wQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-react-display-name@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415"
+ integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-react-jsx-development@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz#dbe5c972811e49c7405b630e4d0d2e1380c0ddc5"
+ integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==
+ dependencies:
+ "@babel/plugin-transform-react-jsx" "^7.18.6"
+
+"@babel/plugin-transform-react-jsx@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.18.6.tgz#2721e96d31df96e3b7ad48ff446995d26bc028ff"
+ integrity sha512-Mz7xMPxoy9kPS/JScj6fJs03TZ/fZ1dJPlMjRAgTaxaS0fUBk8FV/A2rRgfPsVCZqALNwMexD+0Uaf5zlcKPpw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.18.6"
+ "@babel/helper-module-imports" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-jsx" "^7.18.6"
+ "@babel/types" "^7.18.6"
+
+"@babel/plugin-transform-react-pure-annotations@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz#561af267f19f3e5d59291f9950fd7b9663d0d844"
+ integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-regenerator@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz#585c66cb84d4b4bf72519a34cfce761b8676ca73"
+ integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ regenerator-transform "^0.15.0"
+
+"@babel/plugin-transform-reserved-words@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a"
+ integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-runtime@^7.18.6":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.18.9.tgz#d9e4b1b25719307bfafbf43065ed7fb3a83adb8f"
+ integrity sha512-wS8uJwBt7/b/mzE13ktsJdmS4JP/j7PQSaADtnb4I2wL0zK51MQ0pmF8/Jy0wUIS96fr+fXT6S/ifiPXnvrlSg==
+ dependencies:
+ "@babel/helper-module-imports" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.9"
+ babel-plugin-polyfill-corejs2 "^0.3.1"
+ babel-plugin-polyfill-corejs3 "^0.5.2"
+ babel-plugin-polyfill-regenerator "^0.3.1"
+ semver "^6.3.0"
+
+"@babel/plugin-transform-shorthand-properties@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9"
+ integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-spread@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.18.9.tgz#6ea7a6297740f381c540ac56caf75b05b74fb664"
+ integrity sha512-39Q814wyoOPtIB/qGopNIL9xDChOE1pNU0ZY5dO0owhiVt/5kFm4li+/bBtwc7QotG0u5EPzqhZdjMtmqBqyQA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9"
+
+"@babel/plugin-transform-sticky-regex@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc"
+ integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-template-literals@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e"
+ integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-typeof-symbol@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0"
+ integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.9"
+
+"@babel/plugin-transform-typescript@^7.18.6":
+ version "7.18.8"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.18.8.tgz#303feb7a920e650f2213ef37b36bbf327e6fa5a0"
+ integrity sha512-p2xM8HI83UObjsZGofMV/EdYjamsDm6MoN3hXPYIT0+gxIoopE+B7rPYKAxfrz9K9PK7JafTTjqYC6qipLExYA==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/plugin-syntax-typescript" "^7.18.6"
+
+"@babel/plugin-transform-unicode-escapes@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.6.tgz#0d01fb7fb2243ae1c033f65f6e3b4be78db75f27"
+ integrity sha512-XNRwQUXYMP7VLuy54cr/KS/WeL3AZeORhrmeZ7iewgu+X2eBqmpaLI/hzqr9ZxCeUoq0ASK4GUzSM0BDhZkLFw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-unicode-regex@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca"
+ integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/preset-env@^7.18.2", "@babel/preset-env@^7.18.6":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.18.9.tgz#9b3425140d724fbe590322017466580844c7eaff"
+ integrity sha512-75pt/q95cMIHWssYtyfjVlvI+QEZQThQbKvR9xH+F/Agtw/s4Wfc2V9Bwd/P39VtixB7oWxGdH4GteTTwYJWMg==
+ dependencies:
+ "@babel/compat-data" "^7.18.8"
+ "@babel/helper-compilation-targets" "^7.18.9"
+ "@babel/helper-plugin-utils" "^7.18.9"
+ "@babel/helper-validator-option" "^7.18.6"
+ "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6"
+ "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9"
+ "@babel/plugin-proposal-async-generator-functions" "^7.18.6"
+ "@babel/plugin-proposal-class-properties" "^7.18.6"
+ "@babel/plugin-proposal-class-static-block" "^7.18.6"
+ "@babel/plugin-proposal-dynamic-import" "^7.18.6"
+ "@babel/plugin-proposal-export-namespace-from" "^7.18.9"
+ "@babel/plugin-proposal-json-strings" "^7.18.6"
+ "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9"
+ "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6"
+ "@babel/plugin-proposal-numeric-separator" "^7.18.6"
+ "@babel/plugin-proposal-object-rest-spread" "^7.18.9"
+ "@babel/plugin-proposal-optional-catch-binding" "^7.18.6"
+ "@babel/plugin-proposal-optional-chaining" "^7.18.9"
+ "@babel/plugin-proposal-private-methods" "^7.18.6"
+ "@babel/plugin-proposal-private-property-in-object" "^7.18.6"
+ "@babel/plugin-proposal-unicode-property-regex" "^7.18.6"
+ "@babel/plugin-syntax-async-generators" "^7.8.4"
+ "@babel/plugin-syntax-class-properties" "^7.12.13"
+ "@babel/plugin-syntax-class-static-block" "^7.14.5"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.3"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+ "@babel/plugin-syntax-import-assertions" "^7.18.6"
+ "@babel/plugin-syntax-json-strings" "^7.8.3"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.3"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.3"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.3"
+ "@babel/plugin-syntax-private-property-in-object" "^7.14.5"
+ "@babel/plugin-syntax-top-level-await" "^7.14.5"
+ "@babel/plugin-transform-arrow-functions" "^7.18.6"
+ "@babel/plugin-transform-async-to-generator" "^7.18.6"
+ "@babel/plugin-transform-block-scoped-functions" "^7.18.6"
+ "@babel/plugin-transform-block-scoping" "^7.18.9"
+ "@babel/plugin-transform-classes" "^7.18.9"
+ "@babel/plugin-transform-computed-properties" "^7.18.9"
+ "@babel/plugin-transform-destructuring" "^7.18.9"
+ "@babel/plugin-transform-dotall-regex" "^7.18.6"
+ "@babel/plugin-transform-duplicate-keys" "^7.18.9"
+ "@babel/plugin-transform-exponentiation-operator" "^7.18.6"
+ "@babel/plugin-transform-for-of" "^7.18.8"
+ "@babel/plugin-transform-function-name" "^7.18.9"
+ "@babel/plugin-transform-literals" "^7.18.9"
+ "@babel/plugin-transform-member-expression-literals" "^7.18.6"
+ "@babel/plugin-transform-modules-amd" "^7.18.6"
+ "@babel/plugin-transform-modules-commonjs" "^7.18.6"
+ "@babel/plugin-transform-modules-systemjs" "^7.18.9"
+ "@babel/plugin-transform-modules-umd" "^7.18.6"
+ "@babel/plugin-transform-named-capturing-groups-regex" "^7.18.6"
+ "@babel/plugin-transform-new-target" "^7.18.6"
+ "@babel/plugin-transform-object-super" "^7.18.6"
+ "@babel/plugin-transform-parameters" "^7.18.8"
+ "@babel/plugin-transform-property-literals" "^7.18.6"
+ "@babel/plugin-transform-regenerator" "^7.18.6"
+ "@babel/plugin-transform-reserved-words" "^7.18.6"
+ "@babel/plugin-transform-shorthand-properties" "^7.18.6"
+ "@babel/plugin-transform-spread" "^7.18.9"
+ "@babel/plugin-transform-sticky-regex" "^7.18.6"
+ "@babel/plugin-transform-template-literals" "^7.18.9"
+ "@babel/plugin-transform-typeof-symbol" "^7.18.9"
+ "@babel/plugin-transform-unicode-escapes" "^7.18.6"
+ "@babel/plugin-transform-unicode-regex" "^7.18.6"
+ "@babel/preset-modules" "^0.1.5"
+ "@babel/types" "^7.18.9"
+ babel-plugin-polyfill-corejs2 "^0.3.1"
+ babel-plugin-polyfill-corejs3 "^0.5.2"
+ babel-plugin-polyfill-regenerator "^0.3.1"
+ core-js-compat "^3.22.1"
+ semver "^6.3.0"
+
+"@babel/preset-modules@^0.1.5":
+ version "0.1.5"
+ resolved "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9"
+ integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/plugin-proposal-unicode-property-regex" "^7.4.4"
+ "@babel/plugin-transform-dotall-regex" "^7.4.4"
+ "@babel/types" "^7.4.4"
+ esutils "^2.0.2"
+
+"@babel/preset-react@^7.17.12", "@babel/preset-react@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.18.6.tgz#979f76d6277048dc19094c217b507f3ad517dd2d"
+ integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/helper-validator-option" "^7.18.6"
+ "@babel/plugin-transform-react-display-name" "^7.18.6"
+ "@babel/plugin-transform-react-jsx" "^7.18.6"
+ "@babel/plugin-transform-react-jsx-development" "^7.18.6"
+ "@babel/plugin-transform-react-pure-annotations" "^7.18.6"
+
+"@babel/preset-typescript@^7.17.12", "@babel/preset-typescript@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.18.6.tgz#ce64be3e63eddc44240c6358daefac17b3186399"
+ integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.18.6"
+ "@babel/helper-validator-option" "^7.18.6"
+ "@babel/plugin-transform-typescript" "^7.18.6"
+
+"@babel/runtime-corejs3@^7.10.2", "@babel/runtime-corejs3@^7.18.6":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.18.9.tgz#7bacecd1cb2dd694eacd32a91fcf7021c20770ae"
+ integrity sha512-qZEWeccZCrHA2Au4/X05QW5CMdm4VjUDCrGq5gf1ZDcM4hRqreKrtwAn7yci9zfgAS9apvnsFXiGBHBAxZdK9A==
+ dependencies:
+ core-js-pure "^3.20.2"
+ regenerator-runtime "^0.13.4"
+
+"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.18.6", "@babel/runtime@^7.8.4":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.18.9.tgz#b4fcfce55db3d2e5e080d2490f608a3b9f407f4a"
+ integrity sha512-lkqXDcvlFT5rvEjiu6+QYO+1GXrEHRo2LOtS7E4GtX5ESIZOgepqsZBVIj6Pv+a6zqsya9VCgiK1KAK4BvJDAw==
+ dependencies:
+ regenerator-runtime "^0.13.4"
+
+"@babel/template@^7.12.7", "@babel/template@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.npmjs.org/@babel/template/-/template-7.18.6.tgz#1283f4993e00b929d6e2d3c72fdc9168a2977a31"
+ integrity sha512-JoDWzPe+wgBsTTgdnIma3iHNFC7YVJoPssVBDjiHfNlyt4YcunDtcDOUmfVDfCK5MfdsaIoX9PkijPhjH3nYUw==
+ dependencies:
+ "@babel/code-frame" "^7.18.6"
+ "@babel/parser" "^7.18.6"
+ "@babel/types" "^7.18.6"
+
+"@babel/traverse@^7.12.9", "@babel/traverse@^7.13.0", "@babel/traverse@^7.18.8", "@babel/traverse@^7.18.9":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.18.9.tgz#deeff3e8f1bad9786874cb2feda7a2d77a904f98"
+ integrity sha512-LcPAnujXGwBgv3/WHv01pHtb2tihcyW1XuL9wd7jqh1Z8AQkTd+QVjMrMijrln0T7ED3UXLIy36P9Ao7W75rYg==
+ dependencies:
+ "@babel/code-frame" "^7.18.6"
+ "@babel/generator" "^7.18.9"
+ "@babel/helper-environment-visitor" "^7.18.9"
+ "@babel/helper-function-name" "^7.18.9"
+ "@babel/helper-hoist-variables" "^7.18.6"
+ "@babel/helper-split-export-declaration" "^7.18.6"
+ "@babel/parser" "^7.18.9"
+ "@babel/types" "^7.18.9"
+ debug "^4.1.0"
+ globals "^11.1.0"
+
+"@babel/types@^7.12.7", "@babel/types@^7.18.4", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.4.4":
+ version "7.18.9"
+ resolved "https://registry.npmjs.org/@babel/types/-/types-7.18.9.tgz#7148d64ba133d8d73a41b3172ac4b83a1452205f"
+ integrity sha512-WwMLAg2MvJmt/rKEVQBBhIVffMmnilX4oe0sRe7iPOHIGsqpruFHHdrfj4O1CMMtgMtCU4oPafZjDPCRgO57Wg==
+ dependencies:
+ "@babel/helper-validator-identifier" "^7.18.6"
+ to-fast-properties "^2.0.0"
+
+"@colors/colors@1.5.0":
+ version "1.5.0"
+ resolved "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9"
+ integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==
+
+"@csstools/selector-specificity@^2.0.1":
+ version "2.0.2"
+ resolved "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-2.0.2.tgz#1bfafe4b7ed0f3e4105837e056e0a89b108ebe36"
+ integrity sha512-IkpVW/ehM1hWKln4fCA3NzJU8KwD+kIOvPZA4cqxoJHtE21CCzjyp+Kxbu0i5I4tBNOlXPL9mjwnWlL0VEG4Fg==
+
+"@docsearch/css@3.1.1":
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/@docsearch/css/-/css-3.1.1.tgz#e0976bf995e383f8ee8657306311b9cb95016330"
+ integrity sha512-utLgg7E1agqQeqCJn05DWC7XXMk4tMUUnL7MZupcknRu2OzGN13qwey2qA/0NAKkVBGugiWtON0+rlU0QIPojg==
+
+"@docsearch/react@^3.1.1":
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/@docsearch/react/-/react-3.1.1.tgz#3dffb5db8cf9eb95d6e732cf038264bfc10191ed"
+ integrity sha512-cfoql4qvtsVRqBMYxhlGNpvyy/KlCoPqjIsJSZYqYf9AplZncKjLBTcwBu6RXFMVCe30cIFljniI4OjqAU67pQ==
+ dependencies:
+ "@algolia/autocomplete-core" "1.7.1"
+ "@algolia/autocomplete-preset-algolia" "1.7.1"
+ "@docsearch/css" "3.1.1"
+ algoliasearch "^4.0.0"
+
+"@docusaurus/core@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/core/-/core-2.0.1.tgz#a2b0d653e8f18eacddda4778a46b638dd1f0f45c"
+ integrity sha512-Prd46TtZdiixlTl8a+h9bI5HegkfREjSNkrX2rVEwJZeziSz4ya+l7QDnbnCB2XbxEG8cveFo/F9q5lixolDtQ==
+ dependencies:
+ "@babel/core" "^7.18.6"
+ "@babel/generator" "^7.18.7"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.3"
+ "@babel/plugin-transform-runtime" "^7.18.6"
+ "@babel/preset-env" "^7.18.6"
+ "@babel/preset-react" "^7.18.6"
+ "@babel/preset-typescript" "^7.18.6"
+ "@babel/runtime" "^7.18.6"
+ "@babel/runtime-corejs3" "^7.18.6"
+ "@babel/traverse" "^7.18.8"
+ "@docusaurus/cssnano-preset" "2.0.1"
+ "@docusaurus/logger" "2.0.1"
+ "@docusaurus/mdx-loader" "2.0.1"
+ "@docusaurus/react-loadable" "5.5.2"
+ "@docusaurus/utils" "2.0.1"
+ "@docusaurus/utils-common" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ "@slorber/static-site-generator-webpack-plugin" "^4.0.7"
+ "@svgr/webpack" "^6.2.1"
+ autoprefixer "^10.4.7"
+ babel-loader "^8.2.5"
+ babel-plugin-dynamic-import-node "^2.3.3"
+ boxen "^6.2.1"
+ chalk "^4.1.2"
+ chokidar "^3.5.3"
+ clean-css "^5.3.0"
+ cli-table3 "^0.6.2"
+ combine-promises "^1.1.0"
+ commander "^5.1.0"
+ copy-webpack-plugin "^11.0.0"
+ core-js "^3.23.3"
+ css-loader "^6.7.1"
+ css-minimizer-webpack-plugin "^4.0.0"
+ cssnano "^5.1.12"
+ del "^6.1.1"
+ detect-port "^1.3.0"
+ escape-html "^1.0.3"
+ eta "^1.12.3"
+ file-loader "^6.2.0"
+ fs-extra "^10.1.0"
+ html-minifier-terser "^6.1.0"
+ html-tags "^3.2.0"
+ html-webpack-plugin "^5.5.0"
+ import-fresh "^3.3.0"
+ leven "^3.1.0"
+ lodash "^4.17.21"
+ mini-css-extract-plugin "^2.6.1"
+ postcss "^8.4.14"
+ postcss-loader "^7.0.0"
+ prompts "^2.4.2"
+ react-dev-utils "^12.0.1"
+ react-helmet-async "^1.3.0"
+ react-loadable "npm:@docusaurus/react-loadable@5.5.2"
+ react-loadable-ssr-addon-v5-slorber "^1.0.1"
+ react-router "^5.3.3"
+ react-router-config "^5.1.1"
+ react-router-dom "^5.3.3"
+ rtl-detect "^1.0.4"
+ semver "^7.3.7"
+ serve-handler "^6.1.3"
+ shelljs "^0.8.5"
+ terser-webpack-plugin "^5.3.3"
+ tslib "^2.4.0"
+ update-notifier "^5.1.0"
+ url-loader "^4.1.1"
+ wait-on "^6.0.1"
+ webpack "^5.73.0"
+ webpack-bundle-analyzer "^4.5.0"
+ webpack-dev-server "^4.9.3"
+ webpack-merge "^5.8.0"
+ webpackbar "^5.0.2"
+
+"@docusaurus/cssnano-preset@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.1.tgz#4d0c49338cf3aa88c5bd7cffbf77654db8e1e3b2"
+ integrity sha512-MCJ6rRmlqLmlCsZIoIxOxDb0rYzIPEm9PYpsBW+CGNnbk+x8xK+11hnrxzvXHqDRNpxrq3Kq2jYUmg/DkqE6vg==
+ dependencies:
+ cssnano-preset-advanced "^5.3.8"
+ postcss "^8.4.14"
+ postcss-sort-media-queries "^4.2.1"
+ tslib "^2.4.0"
+
+"@docusaurus/logger@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.0.1.tgz#78a940a333d2f654fd9dea24db2c962034d4b1ff"
+ integrity sha512-wIWseCKko1w/WARcDjO3N/XoJ0q/VE42AthP0eNAfEazDjJ94NXbaI6wuUsuY/bMg6hTKGVIpphjj2LoX3g6dA==
+ dependencies:
+ chalk "^4.1.2"
+ tslib "^2.4.0"
+
+"@docusaurus/mdx-loader@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-2.0.1.tgz#cc73690ca5d356687d9e75740560b4159cd5cdb5"
+ integrity sha512-tdNeljdilXCmhbaEND3SAgsqaw/oh7v9onT5yrIrL26OSk2AFwd+MIi4R8jt8vq33M0R4rz2wpknm0fQIkDdvQ==
+ dependencies:
+ "@babel/parser" "^7.18.8"
+ "@babel/traverse" "^7.18.8"
+ "@docusaurus/logger" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ "@mdx-js/mdx" "^1.6.22"
+ escape-html "^1.0.3"
+ file-loader "^6.2.0"
+ fs-extra "^10.1.0"
+ image-size "^1.0.1"
+ mdast-util-to-string "^2.0.0"
+ remark-emoji "^2.2.0"
+ stringify-object "^3.3.0"
+ tslib "^2.4.0"
+ unified "^9.2.2"
+ unist-util-visit "^2.0.3"
+ url-loader "^4.1.1"
+ webpack "^5.73.0"
+
+"@docusaurus/module-type-aliases@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.0.1.tgz#44d7132297bedae0890695b0e7ebbe14a73e26d1"
+ integrity sha512-f888ylnxHAM/3T8p1lx08+lTc6/g7AweSRfRuZvrVhHXj3Tz/nTTxaP6gPTGkJK7WLqTagpar/IGP6/74IBbkg==
+ dependencies:
+ "@docusaurus/react-loadable" "5.5.2"
+ "@docusaurus/types" "2.0.1"
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ "@types/react-router-config" "*"
+ "@types/react-router-dom" "*"
+ react-helmet-async "*"
+ react-loadable "npm:@docusaurus/react-loadable@5.5.2"
+
+"@docusaurus/plugin-content-blog@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.1.tgz#a37065e18ffd26e022ffb16a21ff28199140729e"
+ integrity sha512-/4ua3iFYcpwgpeYgHnhVGROB/ybnauLH2+rICb4vz/+Gn1hjAmGXVYq1fk8g49zGs3uxx5nc0H5bL9P0g977IQ==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/logger" "2.0.1"
+ "@docusaurus/mdx-loader" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ "@docusaurus/utils-common" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ cheerio "^1.0.0-rc.12"
+ feed "^4.2.2"
+ fs-extra "^10.1.0"
+ lodash "^4.17.21"
+ reading-time "^1.5.0"
+ tslib "^2.4.0"
+ unist-util-visit "^2.0.3"
+ utility-types "^3.10.0"
+ webpack "^5.73.0"
+
+"@docusaurus/plugin-content-docs@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.1.tgz#4059591b4bff617e744e856ca680674b27c0b98a"
+ integrity sha512-2qeBWRy1EjgnXdwAO6/csDIS1UVNmhmtk/bQ2s9jqjpwM8YVgZ8QVdkxFAMWXgZWDQdwWwdP1rnmoEelE4HknQ==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/logger" "2.0.1"
+ "@docusaurus/mdx-loader" "2.0.1"
+ "@docusaurus/module-type-aliases" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ "@types/react-router-config" "^5.0.6"
+ combine-promises "^1.1.0"
+ fs-extra "^10.1.0"
+ import-fresh "^3.3.0"
+ js-yaml "^4.1.0"
+ lodash "^4.17.21"
+ tslib "^2.4.0"
+ utility-types "^3.10.0"
+ webpack "^5.73.0"
+
+"@docusaurus/plugin-content-pages@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.1.tgz#013f2e66f80d19b5c95a2d941d67c7cdb67b7191"
+ integrity sha512-6apSVeJENnNecAH5cm5VnRqR103M6qSI6IuiP7tVfD5H4AWrfDNkvJQV2+R2PIq3bGrwmX4fcXl1x4g0oo7iwA==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/mdx-loader" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ fs-extra "^10.1.0"
+ tslib "^2.4.0"
+ webpack "^5.73.0"
+
+"@docusaurus/plugin-debug@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-2.0.1.tgz#2b2a473f8e01fd356e32236f753665b48209bcd4"
+ integrity sha512-jpZBT5HK7SWx1LRQyv9d14i44vSsKXGZsSPA2ndth5HykHJsiAj9Fwl1AtzmtGYuBmI+iXQyOd4MAMHd4ZZ1tg==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ fs-extra "^10.1.0"
+ react-json-view "^1.21.3"
+ tslib "^2.4.0"
+
+"@docusaurus/plugin-google-analytics@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.1.tgz#e3b84237aad2e94dcd1cf1810c1c9bc3d94f186d"
+ integrity sha512-d5qb+ZeQcg1Czoxc+RacETjLdp2sN/TAd7PGN/GrvtijCdgNmvVAtZ9QgajBTG0YbJFVPTeZ39ad2bpoOexX0w==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ tslib "^2.4.0"
+
+"@docusaurus/plugin-google-gtag@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.1.tgz#4cbcf9d520f7ec8124679fbe00867f2299a2f6bb"
+ integrity sha512-qiRufJe2FvIyzICbkjm4VbVCI1hyEju/CebfDKkKh2ZtV4q6DM1WZG7D6VoQSXL8MrMFB895gipOM4BwdM8VsQ==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ tslib "^2.4.0"
+
+"@docusaurus/plugin-sitemap@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.1.tgz#6f8edb82b745b040d6b1495e2798396f63e50289"
+ integrity sha512-KcYuIUIp2JPzUf+Xa7W2BSsjLgN1/0h+VAz7D/C3RYjAgC5ApPX8wO+TECmGfunl/m7WKGUmLabfOon/as64kQ==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/logger" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ "@docusaurus/utils-common" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ fs-extra "^10.1.0"
+ sitemap "^7.1.1"
+ tslib "^2.4.0"
+
+"@docusaurus/preset-classic@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-2.0.1.tgz#21a806e16b61026d2a0efa6ca97e17397065d894"
+ integrity sha512-nOoniTg46My1qdDlLWeFs55uEmxOJ+9WMF8KKG8KMCu5LAvpemMi7rQd4x8Tw+xiPHZ/sQzH9JmPTMPRE4QGPw==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/plugin-content-blog" "2.0.1"
+ "@docusaurus/plugin-content-docs" "2.0.1"
+ "@docusaurus/plugin-content-pages" "2.0.1"
+ "@docusaurus/plugin-debug" "2.0.1"
+ "@docusaurus/plugin-google-analytics" "2.0.1"
+ "@docusaurus/plugin-google-gtag" "2.0.1"
+ "@docusaurus/plugin-sitemap" "2.0.1"
+ "@docusaurus/theme-classic" "2.0.1"
+ "@docusaurus/theme-common" "2.0.1"
+ "@docusaurus/theme-search-algolia" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+
+"@docusaurus/react-loadable@5.5.2", "react-loadable@npm:@docusaurus/react-loadable@5.5.2":
+ version "5.5.2"
+ resolved "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz#81aae0db81ecafbdaee3651f12804580868fa6ce"
+ integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==
+ dependencies:
+ "@types/react" "*"
+ prop-types "^15.6.2"
+
+"@docusaurus/theme-classic@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-2.0.1.tgz#043b6fcd2ecb2aecd134419b198c9f519029d5e7"
+ integrity sha512-0jfigiqkUwIuKOw7Me5tqUM9BBvoQX7qqeevx7v4tkYQexPhk3VYSZo7aRuoJ9oyW5makCTPX551PMJzmq7+sw==
+ dependencies:
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/mdx-loader" "2.0.1"
+ "@docusaurus/module-type-aliases" "2.0.1"
+ "@docusaurus/plugin-content-blog" "2.0.1"
+ "@docusaurus/plugin-content-docs" "2.0.1"
+ "@docusaurus/plugin-content-pages" "2.0.1"
+ "@docusaurus/theme-common" "2.0.1"
+ "@docusaurus/theme-translations" "2.0.1"
+ "@docusaurus/types" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ "@docusaurus/utils-common" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ "@mdx-js/react" "^1.6.22"
+ clsx "^1.2.1"
+ copy-text-to-clipboard "^3.0.1"
+ infima "0.2.0-alpha.42"
+ lodash "^4.17.21"
+ nprogress "^0.2.0"
+ postcss "^8.4.14"
+ prism-react-renderer "^1.3.5"
+ prismjs "^1.28.0"
+ react-router-dom "^5.3.3"
+ rtlcss "^3.5.0"
+ tslib "^2.4.0"
+ utility-types "^3.10.0"
+
+"@docusaurus/theme-common@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.0.1.tgz#9594d58fbef11fe480967b5ce4cdbb3cd78d9ca3"
+ integrity sha512-I3b6e/ryiTQMsbES40cP0DRGnfr0E2qghVq+XecyMKjBPejISoSFEDn0MsnbW8Q26k1Dh/0qDH8QKDqaZZgLhA==
+ dependencies:
+ "@docusaurus/mdx-loader" "2.0.1"
+ "@docusaurus/module-type-aliases" "2.0.1"
+ "@docusaurus/plugin-content-blog" "2.0.1"
+ "@docusaurus/plugin-content-docs" "2.0.1"
+ "@docusaurus/plugin-content-pages" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ "@types/react-router-config" "*"
+ clsx "^1.2.1"
+ parse-numeric-range "^1.3.0"
+ prism-react-renderer "^1.3.5"
+ tslib "^2.4.0"
+ utility-types "^3.10.0"
+
+"@docusaurus/theme-search-algolia@2.0.1", "@docusaurus/theme-search-algolia@^2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.1.tgz#0aab8407b2163f67eb4c48f1de33944e1695fa74"
+ integrity sha512-cw3NaOSKbYlsY6uNj4PgO+5mwyQ3aEWre5RlmvjStaz2cbD15Nr69VG8Rd/F6Q5VsCT8BvSdkPDdDG5d/ACexg==
+ dependencies:
+ "@docsearch/react" "^3.1.1"
+ "@docusaurus/core" "2.0.1"
+ "@docusaurus/logger" "2.0.1"
+ "@docusaurus/plugin-content-docs" "2.0.1"
+ "@docusaurus/theme-common" "2.0.1"
+ "@docusaurus/theme-translations" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ "@docusaurus/utils-validation" "2.0.1"
+ algoliasearch "^4.13.1"
+ algoliasearch-helper "^3.10.0"
+ clsx "^1.2.1"
+ eta "^1.12.3"
+ fs-extra "^10.1.0"
+ lodash "^4.17.21"
+ tslib "^2.4.0"
+ utility-types "^3.10.0"
+
+"@docusaurus/theme-translations@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-2.0.1.tgz#955a687c974265a811bfc743d98ef3eab0379100"
+ integrity sha512-v1MYYlbsdX+rtKnXFcIAn9ar0Z6K0yjqnCYS0p/KLCLrfJwfJ8A3oRJw2HiaIb8jQfk1WMY2h5Qi1p4vHOekQw==
+ dependencies:
+ fs-extra "^10.1.0"
+ tslib "^2.4.0"
+
+"@docusaurus/types@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/types/-/types-2.0.1.tgz#8696a70e85c4b9be80b38ac592d520f6fe72618b"
+ integrity sha512-o+4hAFWkj3sBszVnRTAnNqtAIuIW0bNaYyDwQhQ6bdz3RAPEq9cDKZxMpajsj4z2nRty8XjzhyufAAjxFTyrfg==
+ dependencies:
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ commander "^5.1.0"
+ joi "^17.6.0"
+ react-helmet-async "^1.3.0"
+ utility-types "^3.10.0"
+ webpack "^5.73.0"
+ webpack-merge "^5.8.0"
+
+"@docusaurus/utils-common@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-2.0.1.tgz#b6f2b029547f739e1431ec84abd16974edf495e0"
+ integrity sha512-kajCCDCXRd1HFH5EUW31MPaQcsyNlGakpkDoTBtBvpa4EIPvWaSKy7TIqYKHrZjX4tnJ0YbEJvaXfjjgdq5xSg==
+ dependencies:
+ tslib "^2.4.0"
+
+"@docusaurus/utils-validation@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-2.0.1.tgz#69f7d4944288d71f00fdba6dde10f05008f04308"
+ integrity sha512-f14AnwFBy4/1A19zWthK+Ii80YDz+4qt8oPpK3julywXsheSxPBqgsND3LVBBvB2p3rJHvbo2m3HyB9Tco1JRw==
+ dependencies:
+ "@docusaurus/logger" "2.0.1"
+ "@docusaurus/utils" "2.0.1"
+ joi "^17.6.0"
+ js-yaml "^4.1.0"
+ tslib "^2.4.0"
+
+"@docusaurus/utils@2.0.1":
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.0.1.tgz#37b4b42e29175e5d2d811fcbf9f93bffeca7c353"
+ integrity sha512-u2Vdl/eoVwMfUjDCkg7FjxoiwFs/XhVVtNxQEw8cvB+qaw6QWyT73m96VZzWtUb1fDOefHoZ+bZ0ObFeKk9lMQ==
+ dependencies:
+ "@docusaurus/logger" "2.0.1"
+ "@svgr/webpack" "^6.2.1"
+ file-loader "^6.2.0"
+ fs-extra "^10.1.0"
+ github-slugger "^1.4.0"
+ globby "^11.1.0"
+ gray-matter "^4.0.3"
+ js-yaml "^4.1.0"
+ lodash "^4.17.21"
+ micromatch "^4.0.5"
+ resolve-pathname "^3.0.0"
+ shelljs "^0.8.5"
+ tslib "^2.4.0"
+ url-loader "^4.1.1"
+ webpack "^5.73.0"
+
+"@eslint/eslintrc@^1.3.0":
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.0.tgz#29f92c30bb3e771e4a2048c95fa6855392dfac4f"
+ integrity sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw==
+ dependencies:
+ ajv "^6.12.4"
+ debug "^4.3.2"
+ espree "^9.3.2"
+ globals "^13.15.0"
+ ignore "^5.2.0"
+ import-fresh "^3.2.1"
+ js-yaml "^4.1.0"
+ minimatch "^3.1.2"
+ strip-json-comments "^3.1.1"
+
+"@hapi/hoek@^9.0.0":
+ version "9.3.0"
+ resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz#8368869dcb735be2e7f5cb7647de78e167a251fb"
+ integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==
+
+"@hapi/topo@^5.0.0":
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz#dc448e332c6c6e37a4dc02fd84ba8d44b9afb012"
+ integrity sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==
+ dependencies:
+ "@hapi/hoek" "^9.0.0"
+
+"@humanwhocodes/config-array@^0.9.2":
+ version "0.9.5"
+ resolved "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7"
+ integrity sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==
+ dependencies:
+ "@humanwhocodes/object-schema" "^1.2.1"
+ debug "^4.1.1"
+ minimatch "^3.0.4"
+
+"@humanwhocodes/object-schema@^1.2.1":
+ version "1.2.1"
+ resolved "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
+ integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
+
+"@jridgewell/gen-mapping@^0.1.0":
+ version "0.1.1"
+ resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996"
+ integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==
+ dependencies:
+ "@jridgewell/set-array" "^1.0.0"
+ "@jridgewell/sourcemap-codec" "^1.4.10"
+
+"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2":
+ version "0.3.2"
+ resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9"
+ integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==
+ dependencies:
+ "@jridgewell/set-array" "^1.0.1"
+ "@jridgewell/sourcemap-codec" "^1.4.10"
+ "@jridgewell/trace-mapping" "^0.3.9"
+
+"@jridgewell/resolve-uri@^3.0.3":
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78"
+ integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==
+
+"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1":
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72"
+ integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==
+
+"@jridgewell/source-map@^0.3.2":
+ version "0.3.2"
+ resolved "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb"
+ integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==
+ dependencies:
+ "@jridgewell/gen-mapping" "^0.3.0"
+ "@jridgewell/trace-mapping" "^0.3.9"
+
+"@jridgewell/sourcemap-codec@^1.4.10":
+ version "1.4.14"
+ resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24"
+ integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==
+
+"@jridgewell/trace-mapping@^0.3.7", "@jridgewell/trace-mapping@^0.3.9":
+ version "0.3.14"
+ resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed"
+ integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==
+ dependencies:
+ "@jridgewell/resolve-uri" "^3.0.3"
+ "@jridgewell/sourcemap-codec" "^1.4.10"
+
+"@leichtgewicht/ip-codec@^2.0.1":
+ version "2.0.4"
+ resolved "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b"
+ integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==
+
+"@mdx-js/mdx@^1.6.22":
+ version "1.6.22"
+ resolved "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz#8a723157bf90e78f17dc0f27995398e6c731f1ba"
+ integrity sha512-AMxuLxPz2j5/6TpF/XSdKpQP1NlG0z11dFOlq+2IP/lSgl11GY8ji6S/rgsViN/L0BDvHvUMruRb7ub+24LUYA==
+ dependencies:
+ "@babel/core" "7.12.9"
+ "@babel/plugin-syntax-jsx" "7.12.1"
+ "@babel/plugin-syntax-object-rest-spread" "7.8.3"
+ "@mdx-js/util" "1.6.22"
+ babel-plugin-apply-mdx-type-prop "1.6.22"
+ babel-plugin-extract-import-names "1.6.22"
+ camelcase-css "2.0.1"
+ detab "2.0.4"
+ hast-util-raw "6.0.1"
+ lodash.uniq "4.5.0"
+ mdast-util-to-hast "10.0.1"
+ remark-footnotes "2.0.0"
+ remark-mdx "1.6.22"
+ remark-parse "8.0.3"
+ remark-squeeze-paragraphs "4.0.0"
+ style-to-object "0.3.0"
+ unified "9.2.0"
+ unist-builder "2.0.3"
+ unist-util-visit "2.0.3"
+
+"@mdx-js/react@^1.6.22":
+ version "1.6.22"
+ resolved "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz#ae09b4744fddc74714ee9f9d6f17a66e77c43573"
+ integrity sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==
+
+"@mdx-js/util@1.6.22":
+ version "1.6.22"
+ resolved "https://registry.npmjs.org/@mdx-js/util/-/util-1.6.22.tgz#219dfd89ae5b97a8801f015323ffa4b62f45718b"
+ integrity sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA==
+
+"@nodelib/fs.scandir@2.1.5":
+ version "2.1.5"
+ resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5"
+ integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==
+ dependencies:
+ "@nodelib/fs.stat" "2.0.5"
+ run-parallel "^1.1.9"
+
+"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2":
+ version "2.0.5"
+ resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b"
+ integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==
+
+"@nodelib/fs.walk@^1.2.3":
+ version "1.2.8"
+ resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a"
+ integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==
+ dependencies:
+ "@nodelib/fs.scandir" "2.1.5"
+ fastq "^1.6.0"
+
+"@polka/url@^1.0.0-next.20":
+ version "1.0.0-next.21"
+ resolved "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz#5de5a2385a35309427f6011992b544514d559aa1"
+ integrity sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==
+
+"@sideway/address@^4.1.3":
+ version "4.1.4"
+ resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0"
+ integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==
+ dependencies:
+ "@hapi/hoek" "^9.0.0"
+
+"@sideway/formula@^3.0.0":
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz#fe158aee32e6bd5de85044be615bc08478a0a13c"
+ integrity sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==
+
+"@sideway/pinpoint@^2.0.0":
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz#cff8ffadc372ad29fd3f78277aeb29e632cc70df"
+ integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==
+
+"@sindresorhus/is@^0.14.0":
+ version "0.14.0"
+ resolved "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea"
+ integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==
+
+"@slorber/static-site-generator-webpack-plugin@^4.0.7":
+ version "4.0.7"
+ resolved "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz#fc1678bddefab014e2145cbe25b3ce4e1cfc36f3"
+ integrity sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA==
+ dependencies:
+ eval "^0.1.8"
+ p-map "^4.0.0"
+ webpack-sources "^3.2.2"
+
+"@svgr/babel-plugin-add-jsx-attribute@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.3.0.tgz#eae9f3255da5e6f5d1ec115e4ddcca65709a8611"
+ integrity sha512-3XzJy0dCVEOE2o2Wn8tF9SdQ2na1Q7jJNzIs3+27RHPpEiuqlClBNhIOhPFKr95+bUGtL6nZIgqY8xBhMw0p6g==
+
+"@svgr/babel-plugin-remove-jsx-attribute@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.3.0.tgz#b4910cb52a1499f59ab65c6b1483424913e87768"
+ integrity sha512-zD0sTwXpL78pWaxWxCyqimfukPcJfToKuwW1Po00pUeOYT6KuMQrPnG6XIZpLadydOo+fght8SoxwRb5O9TtWA==
+
+"@svgr/babel-plugin-remove-jsx-empty-expression@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.3.0.tgz#0f8b0969f36096be9f64f2ed052ade314779a3f4"
+ integrity sha512-COsMIL1BRU/ZxFTvd59NFzJPIdvBkV19Jrn7w1NwFmglOUrpchPRSzfW6FzWUh2C8nzJrnjDn6V7i7klVhHZEA==
+
+"@svgr/babel-plugin-replace-jsx-attribute-value@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.3.0.tgz#185b2ff136a703f32a84e16e5bb533ca4d5f42fa"
+ integrity sha512-mKk2uqn1/7dk2I82fYaiLTw12eqmZZ2ZzH3WVhzzLvMXrLIxc9xYFJBNRMrV+77ZDHd791933HWSNChtGeJLQg==
+
+"@svgr/babel-plugin-svg-dynamic-title@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.3.0.tgz#e06db7f06eb6be5bd9300a0d964521ef9eee589b"
+ integrity sha512-jdQJa8DZHfo2POTmgl8ZmDEcpTEz4n6RsANle1DbbC8CGq+1k/RV4MkRL1ceqIJCSOW3ypk23gpG5Q4xlSiY7Q==
+
+"@svgr/babel-plugin-svg-em-dimensions@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.3.0.tgz#1369f2c7c5c725b532224a7a00e500d267a728aa"
+ integrity sha512-yPogu5hLcF5FXCU3a3sCtsP+lloLBkIxM+xplumKwIdQNN28qb+HmFxVLUkT0+MD3y+77DjTtukJzkEBqL/BsA==
+
+"@svgr/babel-plugin-transform-react-native-svg@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.3.0.tgz#72cafb778198af1f9f0be6bfaf369f2cd7746ac5"
+ integrity sha512-Eso0uWFLN8kpR/MB+mD6j0WOTSUPWpyXpEkYt6sg4GItEMvScWgZV8H986CU09oXceaG8AovgPvYdygiJuRsRA==
+
+"@svgr/babel-plugin-transform-svg-component@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.3.0.tgz#95d91c49127211d790fa08517cb0bf0b3f363565"
+ integrity sha512-e9tSsPAHibGyZDPqQ8a5OIDuuON2YY6+XeCr6WqxVLwj+nIqbUOmNNZpekNsUv/gZ6UbtzEpGfZMiZavpavqDg==
+
+"@svgr/babel-preset@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.3.0.tgz#b09393095b61cb889f103264d326bd177dc310dc"
+ integrity sha512-N1UWDZy/kxGW9G4q4jRD+Jyn0N+LmKw0yb9HwAWBZdFBu4ckKtc7lJLHvIFou51r11r/BsZWiJPje3fDLnTMtA==
+ dependencies:
+ "@svgr/babel-plugin-add-jsx-attribute" "^6.3.0"
+ "@svgr/babel-plugin-remove-jsx-attribute" "^6.3.0"
+ "@svgr/babel-plugin-remove-jsx-empty-expression" "^6.3.0"
+ "@svgr/babel-plugin-replace-jsx-attribute-value" "^6.3.0"
+ "@svgr/babel-plugin-svg-dynamic-title" "^6.3.0"
+ "@svgr/babel-plugin-svg-em-dimensions" "^6.3.0"
+ "@svgr/babel-plugin-transform-react-native-svg" "^6.3.0"
+ "@svgr/babel-plugin-transform-svg-component" "^6.3.0"
+
+"@svgr/core@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/core/-/core-6.3.0.tgz#4d8f086c8e5121d490fe06523dad8305b159d20f"
+ integrity sha512-olON7KzAQR4oBbnRmSgJkQrpqPbHd6wURAfTR+HN+6GpcJxknEEDC+l+bpEE/jz2K4lcHex91A2cRUlsGMCazg==
+ dependencies:
+ "@svgr/plugin-jsx" "^6.3.0"
+ camelcase "^6.2.0"
+ cosmiconfig "^7.0.1"
+
+"@svgr/hast-util-to-babel-ast@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.3.0.tgz#594a2503044ff5b66a692a55217aedd99d6b341e"
+ integrity sha512-dlIzHVpWhjMlcTrYUSovfr4MOzm+1I8e9yIAF5eiZU5XNHs8hYDS5xL2QDakt5wd1/2MEtJie97GsCOotlstpA==
+ dependencies:
+ "@babel/types" "^7.18.4"
+ entities "^4.3.0"
+
+"@svgr/plugin-jsx@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.3.0.tgz#df353e0c1618c7a212ce5da63876208fea62e303"
+ integrity sha512-1yr719Dx7c43rgqUaWaYF195bCZ/kZyPk5nWjdRwNaKqfARCfH0tTquD0a9nWkOTFnLSTGytjGdBqLNRw4X0Yw==
+ dependencies:
+ "@babel/core" "^7.18.5"
+ "@svgr/babel-preset" "^6.3.0"
+ "@svgr/hast-util-to-babel-ast" "^6.3.0"
+ svg-parser "^2.0.4"
+
+"@svgr/plugin-svgo@^6.3.0":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.3.0.tgz#4d42573330407c2ec92854e172d569516052750b"
+ integrity sha512-HFbuewy6Gm8jZu1xqbdOB7zKipgf5DgcRG421uVfqgGredBIl1eLt2B0Qr3pFXQE8OTmRqJsZbjKpfrOu1BwkA==
+ dependencies:
+ cosmiconfig "^7.0.1"
+ deepmerge "^4.2.2"
+ svgo "^2.8.0"
+
+"@svgr/webpack@^6.2.1":
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.3.0.tgz#62d3681e6999c170f67edf7f77dcb95c63bafe42"
+ integrity sha512-mtIQaV492zUu2Fq1BZRlrFf3PO1ONzfHZCki7h7ZDHWPuPi6hx32X4lNhN+tT4phPw/Sb8xPj7JNHn5Eobm/WQ==
+ dependencies:
+ "@babel/core" "^7.18.5"
+ "@babel/plugin-transform-react-constant-elements" "^7.17.12"
+ "@babel/preset-env" "^7.18.2"
+ "@babel/preset-react" "^7.17.12"
+ "@babel/preset-typescript" "^7.17.12"
+ "@svgr/core" "^6.3.0"
+ "@svgr/plugin-jsx" "^6.3.0"
+ "@svgr/plugin-svgo" "^6.3.0"
+
+"@szmarczak/http-timer@^1.1.2":
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421"
+ integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==
+ dependencies:
+ defer-to-connect "^1.0.1"
+
+"@trysound/sax@0.2.0":
+ version "0.2.0"
+ resolved "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad"
+ integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==
+
+"@types/body-parser@*":
+ version "1.19.2"
+ resolved "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0"
+ integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==
+ dependencies:
+ "@types/connect" "*"
+ "@types/node" "*"
+
+"@types/bonjour@^3.5.9":
+ version "3.5.10"
+ resolved "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275"
+ integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw==
+ dependencies:
+ "@types/node" "*"
+
+"@types/connect-history-api-fallback@^1.3.5":
+ version "1.3.5"
+ resolved "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae"
+ integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw==
+ dependencies:
+ "@types/express-serve-static-core" "*"
+ "@types/node" "*"
+
+"@types/connect@*":
+ version "3.4.35"
+ resolved "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1"
+ integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==
+ dependencies:
+ "@types/node" "*"
+
+"@types/eslint-scope@^3.7.3":
+ version "3.7.4"
+ resolved "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16"
+ integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA==
+ dependencies:
+ "@types/eslint" "*"
+ "@types/estree" "*"
+
+"@types/eslint@*":
+ version "8.4.5"
+ resolved "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.5.tgz#acdfb7dd36b91cc5d812d7c093811a8f3d9b31e4"
+ integrity sha512-dhsC09y1gpJWnK+Ff4SGvCuSnk9DaU0BJZSzOwa6GVSg65XtTugLBITDAAzRU5duGBoXBHpdR/9jHGxJjNflJQ==
+ dependencies:
+ "@types/estree" "*"
+ "@types/json-schema" "*"
+
+"@types/estree@*":
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2"
+ integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ==
+
+"@types/estree@^0.0.51":
+ version "0.0.51"
+ resolved "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40"
+ integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==
+
+"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18":
+ version "4.17.29"
+ resolved "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.29.tgz#2a1795ea8e9e9c91b4a4bbe475034b20c1ec711c"
+ integrity sha512-uMd++6dMKS32EOuw1Uli3e3BPgdLIXmezcfHv7N4c1s3gkhikBplORPpMq3fuWkxncZN1reb16d5n8yhQ80x7Q==
+ dependencies:
+ "@types/node" "*"
+ "@types/qs" "*"
+ "@types/range-parser" "*"
+
+"@types/express@*", "@types/express@^4.17.13":
+ version "4.17.13"
+ resolved "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz#a76e2995728999bab51a33fabce1d705a3709034"
+ integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==
+ dependencies:
+ "@types/body-parser" "*"
+ "@types/express-serve-static-core" "^4.17.18"
+ "@types/qs" "*"
+ "@types/serve-static" "*"
+
+"@types/hast@^2.0.0":
+ version "2.3.4"
+ resolved "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz#8aa5ef92c117d20d974a82bdfb6a648b08c0bafc"
+ integrity sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==
+ dependencies:
+ "@types/unist" "*"
+
+"@types/history@^4.7.11":
+ version "4.7.11"
+ resolved "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz#56588b17ae8f50c53983a524fc3cc47437969d64"
+ integrity sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA==
+
+"@types/html-minifier-terser@^6.0.0":
+ version "6.1.0"
+ resolved "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35"
+ integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==
+
+"@types/http-proxy@^1.17.8":
+ version "1.17.9"
+ resolved "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a"
+ integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw==
+ dependencies:
+ "@types/node" "*"
+
+"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9":
+ version "7.0.11"
+ resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3"
+ integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==
+
+"@types/json5@^0.0.29":
+ version "0.0.29"
+ resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"
+ integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==
+
+"@types/mdast@^3.0.0":
+ version "3.0.10"
+ resolved "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz#4724244a82a4598884cbbe9bcfd73dff927ee8af"
+ integrity sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==
+ dependencies:
+ "@types/unist" "*"
+
+"@types/mime@^1":
+ version "1.3.2"
+ resolved "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a"
+ integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==
+
+"@types/minimist@^1.2.0":
+ version "1.2.2"
+ resolved "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz#ee771e2ba4b3dc5b372935d549fd9617bf345b8c"
+ integrity sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==
+
+"@types/node@*":
+ version "18.0.6"
+ resolved "https://registry.npmjs.org/@types/node/-/node-18.0.6.tgz#0ba49ac517ad69abe7a1508bc9b3a5483df9d5d7"
+ integrity sha512-/xUq6H2aQm261exT6iZTMifUySEt4GR5KX8eYyY+C4MSNPqSh9oNIP7tz2GLKTlFaiBbgZNxffoR3CVRG+cljw==
+
+"@types/node@^17.0.5":
+ version "17.0.45"
+ resolved "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz#2c0fafd78705e7a18b7906b5201a522719dc5190"
+ integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==
+
+"@types/normalize-package-data@^2.4.0":
+ version "2.4.1"
+ resolved "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301"
+ integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==
+
+"@types/parse-json@^4.0.0":
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0"
+ integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==
+
+"@types/parse5@^5.0.0":
+ version "5.0.3"
+ resolved "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz#e7b5aebbac150f8b5fdd4a46e7f0bd8e65e19109"
+ integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==
+
+"@types/prop-types@*":
+ version "15.7.5"
+ resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf"
+ integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==
+
+"@types/qs@*":
+ version "6.9.7"
+ resolved "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb"
+ integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==
+
+"@types/range-parser@*":
+ version "1.2.4"
+ resolved "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
+ integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==
+
+"@types/react-router-config@*", "@types/react-router-config@^5.0.6":
+ version "5.0.6"
+ resolved "https://registry.npmjs.org/@types/react-router-config/-/react-router-config-5.0.6.tgz#87c5c57e72d241db900d9734512c50ccec062451"
+ integrity sha512-db1mx37a1EJDf1XeX8jJN7R3PZABmJQXR8r28yUjVMFSjkmnQo6X6pOEEmNl+Tp2gYQOGPdYbFIipBtdElZ3Yg==
+ dependencies:
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ "@types/react-router" "*"
+
+"@types/react-router-dom@*":
+ version "5.3.3"
+ resolved "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.3.3.tgz#e9d6b4a66fcdbd651a5f106c2656a30088cc1e83"
+ integrity sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==
+ dependencies:
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ "@types/react-router" "*"
+
+"@types/react-router@*":
+ version "5.1.18"
+ resolved "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.18.tgz#c8851884b60bc23733500d86c1266e1cfbbd9ef3"
+ integrity sha512-YYknwy0D0iOwKQgz9v8nOzt2J6l4gouBmDnWqUUznltOTaon+r8US8ky8HvN0tXvc38U9m6z/t2RsVsnd1zM0g==
+ dependencies:
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+
+"@types/react@*":
+ version "18.0.15"
+ resolved "https://registry.npmjs.org/@types/react/-/react-18.0.15.tgz#d355644c26832dc27f3e6cbf0c4f4603fc4ab7fe"
+ integrity sha512-iz3BtLuIYH1uWdsv6wXYdhozhqj20oD4/Hk2DNXIn1kFsmp9x8d9QB6FnPhfkbhd2PgEONt9Q1x/ebkwjfFLow==
+ dependencies:
+ "@types/prop-types" "*"
+ "@types/scheduler" "*"
+ csstype "^3.0.2"
+
+"@types/retry@0.12.0":
+ version "0.12.0"
+ resolved "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d"
+ integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==
+
+"@types/sax@^1.2.1":
+ version "1.2.4"
+ resolved "https://registry.npmjs.org/@types/sax/-/sax-1.2.4.tgz#8221affa7f4f3cb21abd22f244cfabfa63e6a69e"
+ integrity sha512-pSAff4IAxJjfAXUG6tFkO7dsSbTmf8CtUpfhhZ5VhkRpC4628tJhh3+V6H1E+/Gs9piSzYKT5yzHO5M4GG9jkw==
+ dependencies:
+ "@types/node" "*"
+
+"@types/scheduler@*":
+ version "0.16.2"
+ resolved "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39"
+ integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==
+
+"@types/serve-index@^1.9.1":
+ version "1.9.1"
+ resolved "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278"
+ integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg==
+ dependencies:
+ "@types/express" "*"
+
+"@types/serve-static@*", "@types/serve-static@^1.13.10":
+ version "1.13.10"
+ resolved "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz#f5e0ce8797d2d7cc5ebeda48a52c96c4fa47a8d9"
+ integrity sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==
+ dependencies:
+ "@types/mime" "^1"
+ "@types/node" "*"
+
+"@types/sockjs@^0.3.33":
+ version "0.3.33"
+ resolved "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f"
+ integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw==
+ dependencies:
+ "@types/node" "*"
+
+"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3":
+ version "2.0.6"
+ resolved "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d"
+ integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==
+
+"@types/ws@^8.5.1":
+ version "8.5.3"
+ resolved "https://registry.npmjs.org/@types/ws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d"
+ integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w==
+ dependencies:
+ "@types/node" "*"
+
+"@webassemblyjs/ast@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7"
+ integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==
+ dependencies:
+ "@webassemblyjs/helper-numbers" "1.11.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.1"
+
+"@webassemblyjs/floating-point-hex-parser@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f"
+ integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==
+
+"@webassemblyjs/helper-api-error@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16"
+ integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==
+
+"@webassemblyjs/helper-buffer@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5"
+ integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==
+
+"@webassemblyjs/helper-numbers@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae"
+ integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==
+ dependencies:
+ "@webassemblyjs/floating-point-hex-parser" "1.11.1"
+ "@webassemblyjs/helper-api-error" "1.11.1"
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/helper-wasm-bytecode@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1"
+ integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==
+
+"@webassemblyjs/helper-wasm-section@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a"
+ integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==
+ dependencies:
+ "@webassemblyjs/ast" "1.11.1"
+ "@webassemblyjs/helper-buffer" "1.11.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.1"
+ "@webassemblyjs/wasm-gen" "1.11.1"
+
+"@webassemblyjs/ieee754@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614"
+ integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==
+ dependencies:
+ "@xtuc/ieee754" "^1.2.0"
+
+"@webassemblyjs/leb128@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5"
+ integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==
+ dependencies:
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/utf8@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff"
+ integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==
+
+"@webassemblyjs/wasm-edit@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6"
+ integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==
+ dependencies:
+ "@webassemblyjs/ast" "1.11.1"
+ "@webassemblyjs/helper-buffer" "1.11.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.1"
+ "@webassemblyjs/helper-wasm-section" "1.11.1"
+ "@webassemblyjs/wasm-gen" "1.11.1"
+ "@webassemblyjs/wasm-opt" "1.11.1"
+ "@webassemblyjs/wasm-parser" "1.11.1"
+ "@webassemblyjs/wast-printer" "1.11.1"
+
+"@webassemblyjs/wasm-gen@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76"
+ integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==
+ dependencies:
+ "@webassemblyjs/ast" "1.11.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.1"
+ "@webassemblyjs/ieee754" "1.11.1"
+ "@webassemblyjs/leb128" "1.11.1"
+ "@webassemblyjs/utf8" "1.11.1"
+
+"@webassemblyjs/wasm-opt@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2"
+ integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==
+ dependencies:
+ "@webassemblyjs/ast" "1.11.1"
+ "@webassemblyjs/helper-buffer" "1.11.1"
+ "@webassemblyjs/wasm-gen" "1.11.1"
+ "@webassemblyjs/wasm-parser" "1.11.1"
+
+"@webassemblyjs/wasm-parser@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199"
+ integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==
+ dependencies:
+ "@webassemblyjs/ast" "1.11.1"
+ "@webassemblyjs/helper-api-error" "1.11.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.1"
+ "@webassemblyjs/ieee754" "1.11.1"
+ "@webassemblyjs/leb128" "1.11.1"
+ "@webassemblyjs/utf8" "1.11.1"
+
+"@webassemblyjs/wast-printer@1.11.1":
+ version "1.11.1"
+ resolved "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0"
+ integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==
+ dependencies:
+ "@webassemblyjs/ast" "1.11.1"
+ "@xtuc/long" "4.2.2"
+
+"@xtuc/ieee754@^1.2.0":
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790"
+ integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==
+
+"@xtuc/long@4.2.2":
+ version "4.2.2"
+ resolved "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d"
+ integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==
+
+accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8:
+ version "1.3.8"
+ resolved "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e"
+ integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==
+ dependencies:
+ mime-types "~2.1.34"
+ negotiator "0.6.3"
+
+acorn-import-assertions@^1.7.6:
+ version "1.8.0"
+ resolved "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9"
+ integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==
+
+acorn-jsx@^5.3.2:
+ version "5.3.2"
+ resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
+ integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
+
+acorn-walk@^8.0.0:
+ version "8.2.0"
+ resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1"
+ integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==
+
+acorn@^8.0.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1:
+ version "8.7.1"
+ resolved "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30"
+ integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==
+
+address@^1.0.1, address@^1.1.2:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/address/-/address-1.2.0.tgz#d352a62c92fee90f89a693eccd2a8b2139ab02d9"
+ integrity sha512-tNEZYz5G/zYunxFm7sfhAxkXEuLj3K6BKwv6ZURlsF6yiUQ65z0Q2wZW9L5cPUl9ocofGvXOdFYbFHp0+6MOig==
+
+aggregate-error@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a"
+ integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==
+ dependencies:
+ clean-stack "^2.0.0"
+ indent-string "^4.0.0"
+
+ajv-formats@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520"
+ integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==
+ dependencies:
+ ajv "^8.0.0"
+
+ajv-keywords@^3.4.1, ajv-keywords@^3.5.2:
+ version "3.5.2"
+ resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d"
+ integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==
+
+ajv-keywords@^5.0.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16"
+ integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==
+ dependencies:
+ fast-deep-equal "^3.1.3"
+
+ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5:
+ version "6.12.6"
+ resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
+ integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
+ dependencies:
+ fast-deep-equal "^3.1.1"
+ fast-json-stable-stringify "^2.0.0"
+ json-schema-traverse "^0.4.1"
+ uri-js "^4.2.2"
+
+ajv@^8.0.0, ajv@^8.0.1, ajv@^8.8.0:
+ version "8.11.0"
+ resolved "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f"
+ integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==
+ dependencies:
+ fast-deep-equal "^3.1.1"
+ json-schema-traverse "^1.0.0"
+ require-from-string "^2.0.2"
+ uri-js "^4.2.2"
+
+algoliasearch-helper@^3.10.0:
+ version "3.10.0"
+ resolved "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.10.0.tgz#59a0f645dd3c7e55cf01faa568d1af50c49d36f6"
+ integrity sha512-4E4od8qWWDMVvQ3jaRX6Oks/k35ywD011wAA4LbYMMjOtaZV6VWaTjRr4iN2bdaXP2o1BP7SLFMBf3wvnHmd8Q==
+ dependencies:
+ "@algolia/events" "^4.0.1"
+
+algoliasearch@^4.0.0, algoliasearch@^4.13.1:
+ version "4.14.1"
+ resolved "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.14.1.tgz#7f24cabd264f8294b461d108e1603e673571e806"
+ integrity sha512-ZWqnbsGUgU03/IyG995pMCc+EmNVDA/4c9ntr8B0dWQwFqazOQ4ErvKZxarbgSNmyPo/eZcVsTb0bNplJMttGQ==
+ dependencies:
+ "@algolia/cache-browser-local-storage" "4.14.1"
+ "@algolia/cache-common" "4.14.1"
+ "@algolia/cache-in-memory" "4.14.1"
+ "@algolia/client-account" "4.14.1"
+ "@algolia/client-analytics" "4.14.1"
+ "@algolia/client-common" "4.14.1"
+ "@algolia/client-personalization" "4.14.1"
+ "@algolia/client-search" "4.14.1"
+ "@algolia/logger-common" "4.14.1"
+ "@algolia/logger-console" "4.14.1"
+ "@algolia/requester-browser-xhr" "4.14.1"
+ "@algolia/requester-common" "4.14.1"
+ "@algolia/requester-node-http" "4.14.1"
+ "@algolia/transporter" "4.14.1"
+
+ansi-align@^3.0.0, ansi-align@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz#0cdf12e111ace773a86e9a1fad1225c43cb19a59"
+ integrity sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==
+ dependencies:
+ string-width "^4.1.0"
+
+ansi-html-community@^0.0.8:
+ version "0.0.8"
+ resolved "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41"
+ integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==
+
+ansi-regex@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
+ integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
+
+ansi-regex@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a"
+ integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==
+
+ansi-styles@^3.2.1:
+ version "3.2.1"
+ resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
+ integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
+ dependencies:
+ color-convert "^1.9.0"
+
+ansi-styles@^4.0.0, ansi-styles@^4.1.0:
+ version "4.3.0"
+ resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
+ integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==
+ dependencies:
+ color-convert "^2.0.1"
+
+ansi-styles@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.1.0.tgz#87313c102b8118abd57371afab34618bf7350ed3"
+ integrity sha512-VbqNsoz55SYGczauuup0MFUyXNQviSpFTj1RQtFzmQLk18qbVSpTFFGMT293rmDaQuKCT6InmbuEyUne4mTuxQ==
+
+anymatch@~3.1.2:
+ version "3.1.2"
+ resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716"
+ integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==
+ dependencies:
+ normalize-path "^3.0.0"
+ picomatch "^2.0.4"
+
+arg@^5.0.0:
+ version "5.0.2"
+ resolved "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c"
+ integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==
+
+argparse@^1.0.7:
+ version "1.0.10"
+ resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
+ integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
+ dependencies:
+ sprintf-js "~1.0.2"
+
+argparse@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
+ integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==
+
+aria-query@^4.2.2:
+ version "4.2.2"
+ resolved "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b"
+ integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==
+ dependencies:
+ "@babel/runtime" "^7.10.2"
+ "@babel/runtime-corejs3" "^7.10.2"
+
+array-flatten@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
+ integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==
+
+array-flatten@^2.1.2:
+ version "2.1.2"
+ resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099"
+ integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==
+
+array-includes@^3.1.4, array-includes@^3.1.5:
+ version "3.1.5"
+ resolved "https://registry.npmjs.org/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb"
+ integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.4"
+ es-abstract "^1.19.5"
+ get-intrinsic "^1.1.1"
+ is-string "^1.0.7"
+
+array-union@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d"
+ integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==
+
+array.prototype.flat@^1.2.5:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b"
+ integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.19.2"
+ es-shim-unscopables "^1.0.0"
+
+array.prototype.flatmap@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f"
+ integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.19.2"
+ es-shim-unscopables "^1.0.0"
+
+arrify@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d"
+ integrity sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==
+
+asap@~2.0.3:
+ version "2.0.6"
+ resolved "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
+ integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==
+
+ast-types-flow@^0.0.7:
+ version "0.0.7"
+ resolved "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad"
+ integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==
+
+astral-regex@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31"
+ integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==
+
+at-least-node@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
+ integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
+
+autoprefixer@^10.3.7, autoprefixer@^10.4.7:
+ version "10.4.7"
+ resolved "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.7.tgz#1db8d195f41a52ca5069b7593be167618edbbedf"
+ integrity sha512-ypHju4Y2Oav95SipEcCcI5J7CGPuvz8oat7sUtYj3ClK44bldfvtvcxK6IEK++7rqB7YchDGzweZIBG+SD0ZAA==
+ dependencies:
+ browserslist "^4.20.3"
+ caniuse-lite "^1.0.30001335"
+ fraction.js "^4.2.0"
+ normalize-range "^0.1.2"
+ picocolors "^1.0.0"
+ postcss-value-parser "^4.2.0"
+
+axe-core@^4.4.2:
+ version "4.4.3"
+ resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.4.3.tgz#11c74d23d5013c0fa5d183796729bc3482bd2f6f"
+ integrity sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w==
+
+axios@^0.25.0:
+ version "0.25.0"
+ resolved "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz#349cfbb31331a9b4453190791760a8d35b093e0a"
+ integrity sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==
+ dependencies:
+ follow-redirects "^1.14.7"
+
+axobject-query@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be"
+ integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==
+
+babel-loader@^8.2.5:
+ version "8.2.5"
+ resolved "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e"
+ integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==
+ dependencies:
+ find-cache-dir "^3.3.1"
+ loader-utils "^2.0.0"
+ make-dir "^3.1.0"
+ schema-utils "^2.6.5"
+
+babel-plugin-apply-mdx-type-prop@1.6.22:
+ version "1.6.22"
+ resolved "https://registry.npmjs.org/babel-plugin-apply-mdx-type-prop/-/babel-plugin-apply-mdx-type-prop-1.6.22.tgz#d216e8fd0de91de3f1478ef3231e05446bc8705b"
+ integrity sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "7.10.4"
+ "@mdx-js/util" "1.6.22"
+
+babel-plugin-dynamic-import-node@^2.3.3:
+ version "2.3.3"
+ resolved "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3"
+ integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==
+ dependencies:
+ object.assign "^4.1.0"
+
+babel-plugin-extract-import-names@1.6.22:
+ version "1.6.22"
+ resolved "https://registry.npmjs.org/babel-plugin-extract-import-names/-/babel-plugin-extract-import-names-1.6.22.tgz#de5f9a28eb12f3eb2578bf74472204e66d1a13dc"
+ integrity sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "7.10.4"
+
+babel-plugin-polyfill-corejs2@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz#440f1b70ccfaabc6b676d196239b138f8a2cfba5"
+ integrity sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==
+ dependencies:
+ "@babel/compat-data" "^7.13.11"
+ "@babel/helper-define-polyfill-provider" "^0.3.1"
+ semver "^6.1.1"
+
+babel-plugin-polyfill-corejs3@^0.5.2:
+ version "0.5.2"
+ resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz#aabe4b2fa04a6e038b688c5e55d44e78cd3a5f72"
+ integrity sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ==
+ dependencies:
+ "@babel/helper-define-polyfill-provider" "^0.3.1"
+ core-js-compat "^3.21.0"
+
+babel-plugin-polyfill-regenerator@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz#2c0678ea47c75c8cc2fbb1852278d8fb68233990"
+ integrity sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==
+ dependencies:
+ "@babel/helper-define-polyfill-provider" "^0.3.1"
+
+bail@^1.0.0:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776"
+ integrity sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==
+
+balanced-match@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
+ integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
+
+balanced-match@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-2.0.0.tgz#dc70f920d78db8b858535795867bf48f820633d9"
+ integrity sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==
+
+base16@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz#e297f60d7ec1014a7a971a39ebc8a98c0b681e70"
+ integrity sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==
+
+batch@0.6.1:
+ version "0.6.1"
+ resolved "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16"
+ integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==
+
+big.js@^5.2.2:
+ version "5.2.2"
+ resolved "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328"
+ integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==
+
+binary-extensions@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
+ integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==
+
+body-parser@1.20.0:
+ version "1.20.0"
+ resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5"
+ integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==
+ dependencies:
+ bytes "3.1.2"
+ content-type "~1.0.4"
+ debug "2.6.9"
+ depd "2.0.0"
+ destroy "1.2.0"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ on-finished "2.4.1"
+ qs "6.10.3"
+ raw-body "2.5.1"
+ type-is "~1.6.18"
+ unpipe "1.0.0"
+
+bonjour-service@^1.0.11:
+ version "1.0.13"
+ resolved "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.0.13.tgz#4ac003dc1626023252d58adf2946f57e5da450c1"
+ integrity sha512-LWKRU/7EqDUC9CTAQtuZl5HzBALoCYwtLhffW3et7vZMwv3bWLpJf8bRYlMD5OCcDpTfnPgNCV4yo9ZIaJGMiA==
+ dependencies:
+ array-flatten "^2.1.2"
+ dns-equal "^1.0.0"
+ fast-deep-equal "^3.1.3"
+ multicast-dns "^7.2.5"
+
+boolbase@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
+ integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==
+
+boxen@^5.0.0:
+ version "5.1.2"
+ resolved "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz#788cb686fc83c1f486dfa8a40c68fc2b831d2b50"
+ integrity sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==
+ dependencies:
+ ansi-align "^3.0.0"
+ camelcase "^6.2.0"
+ chalk "^4.1.0"
+ cli-boxes "^2.2.1"
+ string-width "^4.2.2"
+ type-fest "^0.20.2"
+ widest-line "^3.1.0"
+ wrap-ansi "^7.0.0"
+
+boxen@^6.2.1:
+ version "6.2.1"
+ resolved "https://registry.npmjs.org/boxen/-/boxen-6.2.1.tgz#b098a2278b2cd2845deef2dff2efc38d329b434d"
+ integrity sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw==
+ dependencies:
+ ansi-align "^3.0.1"
+ camelcase "^6.2.0"
+ chalk "^4.1.2"
+ cli-boxes "^3.0.0"
+ string-width "^5.0.1"
+ type-fest "^2.5.0"
+ widest-line "^4.0.1"
+ wrap-ansi "^8.0.1"
+
+brace-expansion@^1.1.7:
+ version "1.1.11"
+ resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
+ integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
+ dependencies:
+ balanced-match "^1.0.0"
+ concat-map "0.0.1"
+
+braces@^3.0.2, braces@~3.0.2:
+ version "3.0.2"
+ resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
+ integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
+ dependencies:
+ fill-range "^7.0.1"
+
+browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.2, browserslist@^4.20.3, browserslist@^4.21.2:
+ version "4.21.2"
+ resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.21.2.tgz#59a400757465535954946a400b841ed37e2b4ecf"
+ integrity sha512-MonuOgAtUB46uP5CezYbRaYKBNt2LxP0yX+Pmj4LkcDFGkn9Cbpi83d9sCjwQDErXsIJSzY5oKGDbgOlF/LPAA==
+ dependencies:
+ caniuse-lite "^1.0.30001366"
+ electron-to-chromium "^1.4.188"
+ node-releases "^2.0.6"
+ update-browserslist-db "^1.0.4"
+
+buffer-from@^1.0.0:
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
+ integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
+
+bytes@3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048"
+ integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==
+
+bytes@3.1.2:
+ version "3.1.2"
+ resolved "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
+ integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==
+
+cacheable-request@^6.0.0:
+ version "6.1.0"
+ resolved "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912"
+ integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==
+ dependencies:
+ clone-response "^1.0.2"
+ get-stream "^5.1.0"
+ http-cache-semantics "^4.0.0"
+ keyv "^3.0.0"
+ lowercase-keys "^2.0.0"
+ normalize-url "^4.1.0"
+ responselike "^1.0.2"
+
+call-bind@^1.0.0, call-bind@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
+ integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==
+ dependencies:
+ function-bind "^1.1.1"
+ get-intrinsic "^1.0.2"
+
+callsites@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
+ integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
+
+camel-case@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a"
+ integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==
+ dependencies:
+ pascal-case "^3.1.2"
+ tslib "^2.0.3"
+
+camelcase-css@2.0.1:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5"
+ integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==
+
+camelcase-keys@^6.2.2:
+ version "6.2.2"
+ resolved "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz#5e755d6ba51aa223ec7d3d52f25778210f9dc3c0"
+ integrity sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==
+ dependencies:
+ camelcase "^5.3.1"
+ map-obj "^4.0.0"
+ quick-lru "^4.0.1"
+
+camelcase@^5.3.1:
+ version "5.3.1"
+ resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
+ integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
+
+camelcase@^6.2.0:
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
+ integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
+
+caniuse-api@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0"
+ integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==
+ dependencies:
+ browserslist "^4.0.0"
+ caniuse-lite "^1.0.0"
+ lodash.memoize "^4.1.2"
+ lodash.uniq "^4.5.0"
+
+caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001335, caniuse-lite@^1.0.30001366:
+ version "1.0.30001367"
+ resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001367.tgz#2b97fe472e8fa29c78c5970615d7cd2ee414108a"
+ integrity sha512-XDgbeOHfifWV3GEES2B8rtsrADx4Jf+juKX2SICJcaUhjYBO3bR96kvEIHa15VU6ohtOhBZuPGGYGbXMRn0NCw==
+
+ccount@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043"
+ integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==
+
+chalk@^2.0.0:
+ version "2.4.2"
+ resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
+ integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
+ dependencies:
+ ansi-styles "^3.2.1"
+ escape-string-regexp "^1.0.5"
+ supports-color "^5.3.0"
+
+chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
+ integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
+ dependencies:
+ ansi-styles "^4.1.0"
+ supports-color "^7.1.0"
+
+character-entities-legacy@^1.0.0:
+ version "1.1.4"
+ resolved "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz#94bc1845dce70a5bb9d2ecc748725661293d8fc1"
+ integrity sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==
+
+character-entities@^1.0.0:
+ version "1.2.4"
+ resolved "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz#e12c3939b7eaf4e5b15e7ad4c5e28e1d48c5b16b"
+ integrity sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==
+
+character-reference-invalid@^1.0.0:
+ version "1.1.4"
+ resolved "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz#083329cda0eae272ab3dbbf37e9a382c13af1560"
+ integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==
+
+cheerio-select@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz#4d8673286b8126ca2a8e42740d5e3c4884ae21b4"
+ integrity sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==
+ dependencies:
+ boolbase "^1.0.0"
+ css-select "^5.1.0"
+ css-what "^6.1.0"
+ domelementtype "^2.3.0"
+ domhandler "^5.0.3"
+ domutils "^3.0.1"
+
+cheerio@^1.0.0-rc.12:
+ version "1.0.0-rc.12"
+ resolved "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz#788bf7466506b1c6bf5fae51d24a2c4d62e47683"
+ integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==
+ dependencies:
+ cheerio-select "^2.1.0"
+ dom-serializer "^2.0.0"
+ domhandler "^5.0.3"
+ domutils "^3.0.1"
+ htmlparser2 "^8.0.1"
+ parse5 "^7.0.0"
+ parse5-htmlparser2-tree-adapter "^7.0.0"
+
+chokidar@^3.4.2, chokidar@^3.5.3:
+ version "3.5.3"
+ resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd"
+ integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==
+ dependencies:
+ anymatch "~3.1.2"
+ braces "~3.0.2"
+ glob-parent "~5.1.2"
+ is-binary-path "~2.1.0"
+ is-glob "~4.0.1"
+ normalize-path "~3.0.0"
+ readdirp "~3.6.0"
+ optionalDependencies:
+ fsevents "~2.3.2"
+
+chrome-trace-event@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac"
+ integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==
+
+ci-info@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46"
+ integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==
+
+clean-css@^5.2.2, clean-css@^5.3.0:
+ version "5.3.1"
+ resolved "https://registry.npmjs.org/clean-css/-/clean-css-5.3.1.tgz#d0610b0b90d125196a2894d35366f734e5d7aa32"
+ integrity sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg==
+ dependencies:
+ source-map "~0.6.0"
+
+clean-stack@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
+ integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
+
+cli-boxes@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f"
+ integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==
+
+cli-boxes@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz#71a10c716feeba005e4504f36329ef0b17cf3145"
+ integrity sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==
+
+cli-table3@^0.6.2:
+ version "0.6.2"
+ resolved "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.2.tgz#aaf5df9d8b5bf12634dc8b3040806a0c07120d2a"
+ integrity sha512-QyavHCaIC80cMivimWu4aWHilIpiDpfm3hGmqAmXVL1UsnbLuBSMd21hTX6VY4ZSDSM73ESLeF8TOYId3rBTbw==
+ dependencies:
+ string-width "^4.2.0"
+ optionalDependencies:
+ "@colors/colors" "1.5.0"
+
+clone-deep@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387"
+ integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==
+ dependencies:
+ is-plain-object "^2.0.4"
+ kind-of "^6.0.2"
+ shallow-clone "^3.0.0"
+
+clone-regexp@^2.1.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/clone-regexp/-/clone-regexp-2.2.0.tgz#7d65e00885cd8796405c35a737e7a86b7429e36f"
+ integrity sha512-beMpP7BOtTipFuW8hrJvREQ2DrRu3BE7by0ZpibtfBA+qfHYvMGTc2Yb1JMYPKg/JUw0CHYvpg796aNTSW9z7Q==
+ dependencies:
+ is-regexp "^2.0.0"
+
+clone-response@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3"
+ integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==
+ dependencies:
+ mimic-response "^1.0.0"
+
+clsx@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12"
+ integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==
+
+collapse-white-space@^1.0.2:
+ version "1.0.6"
+ resolved "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz#e63629c0016665792060dbbeb79c42239d2c5287"
+ integrity sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==
+
+color-convert@^1.9.0:
+ version "1.9.3"
+ resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
+ integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
+ dependencies:
+ color-name "1.1.3"
+
+color-convert@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
+ integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
+ dependencies:
+ color-name "~1.1.4"
+
+color-name@1.1.3:
+ version "1.1.3"
+ resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
+ integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
+
+color-name@~1.1.4:
+ version "1.1.4"
+ resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
+ integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
+
+colord@^2.9.1, colord@^2.9.2:
+ version "2.9.2"
+ resolved "https://registry.npmjs.org/colord/-/colord-2.9.2.tgz#25e2bacbbaa65991422c07ea209e2089428effb1"
+ integrity sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ==
+
+colorette@^2.0.10:
+ version "2.0.19"
+ resolved "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798"
+ integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==
+
+combine-promises@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/combine-promises/-/combine-promises-1.1.0.tgz#72db90743c0ca7aab7d0d8d2052fd7b0f674de71"
+ integrity sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg==
+
+comma-separated-tokens@^1.0.0:
+ version "1.0.8"
+ resolved "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz#632b80b6117867a158f1080ad498b2fbe7e3f5ea"
+ integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==
+
+commander@^2.20.0:
+ version "2.20.3"
+ resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33"
+ integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==
+
+commander@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae"
+ integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==
+
+commander@^7.2.0:
+ version "7.2.0"
+ resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7"
+ integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==
+
+commander@^8.3.0:
+ version "8.3.0"
+ resolved "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66"
+ integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==
+
+commondir@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
+ integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==
+
+compressible@~2.0.16:
+ version "2.0.18"
+ resolved "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
+ integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==
+ dependencies:
+ mime-db ">= 1.43.0 < 2"
+
+compression@^1.7.4:
+ version "1.7.4"
+ resolved "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f"
+ integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==
+ dependencies:
+ accepts "~1.3.5"
+ bytes "3.0.0"
+ compressible "~2.0.16"
+ debug "2.6.9"
+ on-headers "~1.0.2"
+ safe-buffer "5.1.2"
+ vary "~1.1.2"
+
+concat-map@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
+ integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
+
+configstore@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96"
+ integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==
+ dependencies:
+ dot-prop "^5.2.0"
+ graceful-fs "^4.1.2"
+ make-dir "^3.0.0"
+ unique-string "^2.0.0"
+ write-file-atomic "^3.0.0"
+ xdg-basedir "^4.0.0"
+
+confusing-browser-globals@^1.0.10:
+ version "1.0.11"
+ resolved "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81"
+ integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==
+
+connect-history-api-fallback@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8"
+ integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==
+
+consola@^2.15.3:
+ version "2.15.3"
+ resolved "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz#2e11f98d6a4be71ff72e0bdf07bd23e12cb61550"
+ integrity sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==
+
+content-disposition@0.5.2:
+ version "0.5.2"
+ resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4"
+ integrity sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA==
+
+content-disposition@0.5.4:
+ version "0.5.4"
+ resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe"
+ integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==
+ dependencies:
+ safe-buffer "5.2.1"
+
+content-type@~1.0.4:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
+ integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
+
+convert-source-map@^1.7.0:
+ version "1.8.0"
+ resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369"
+ integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==
+ dependencies:
+ safe-buffer "~5.1.1"
+
+cookie-signature@1.0.6:
+ version "1.0.6"
+ resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
+ integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==
+
+cookie@0.5.0:
+ version "0.5.0"
+ resolved "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b"
+ integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
+
+copy-text-to-clipboard@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz#8cbf8f90e0a47f12e4a24743736265d157bce69c"
+ integrity sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q==
+
+copy-webpack-plugin@^11.0.0:
+ version "11.0.0"
+ resolved "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz#96d4dbdb5f73d02dd72d0528d1958721ab72e04a"
+ integrity sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ==
+ dependencies:
+ fast-glob "^3.2.11"
+ glob-parent "^6.0.1"
+ globby "^13.1.1"
+ normalize-path "^3.0.0"
+ schema-utils "^4.0.0"
+ serialize-javascript "^6.0.0"
+
+core-js-compat@^3.21.0, core-js-compat@^3.22.1:
+ version "3.23.5"
+ resolved "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.23.5.tgz#11edce2f1c4f69a96d30ce77c805ce118909cd5b"
+ integrity sha512-fHYozIFIxd+91IIbXJgWd/igXIc8Mf9is0fusswjnGIWVG96y2cwyUdlCkGOw6rMLHKAxg7xtCIVaHsyOUnJIg==
+ dependencies:
+ browserslist "^4.21.2"
+ semver "7.0.0"
+
+core-js-pure@^3.20.2:
+ version "3.23.5"
+ resolved "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.23.5.tgz#23daaa9af9230e50f10b0fa4b8e6b87402be4c33"
+ integrity sha512-8t78LdpKSuCq4pJYCYk8hl7XEkAX+BP16yRIwL3AanTksxuEf7CM83vRyctmiEL8NDZ3jpUcv56fk9/zG3aIuw==
+
+core-js@^3.23.3:
+ version "3.23.5"
+ resolved "https://registry.npmjs.org/core-js/-/core-js-3.23.5.tgz#1f82b0de5eece800827a2f59d597509c67650475"
+ integrity sha512-7Vh11tujtAZy82da4duVreQysIoO2EvVrur7y6IzZkH1IHPSekuDi8Vuw1+YKjkbfWLRD7Nc9ICQ/sIUDutcyg==
+
+core-util-is@~1.0.0:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
+ integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
+
+cosmiconfig@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982"
+ integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==
+ dependencies:
+ "@types/parse-json" "^4.0.0"
+ import-fresh "^3.1.0"
+ parse-json "^5.0.0"
+ path-type "^4.0.0"
+ yaml "^1.7.2"
+
+cosmiconfig@^7.0.0, cosmiconfig@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d"
+ integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==
+ dependencies:
+ "@types/parse-json" "^4.0.0"
+ import-fresh "^3.2.1"
+ parse-json "^5.0.0"
+ path-type "^4.0.0"
+ yaml "^1.10.0"
+
+cross-fetch@^3.1.5:
+ version "3.1.5"
+ resolved "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f"
+ integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==
+ dependencies:
+ node-fetch "2.6.7"
+
+cross-spawn@^7.0.2, cross-spawn@^7.0.3:
+ version "7.0.3"
+ resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
+ integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
+ dependencies:
+ path-key "^3.1.0"
+ shebang-command "^2.0.0"
+ which "^2.0.1"
+
+crypto-random-string@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
+ integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
+
+css-declaration-sorter@^6.3.0:
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.3.0.tgz#72ebd995c8f4532ff0036631f7365cce9759df14"
+ integrity sha512-OGT677UGHJTAVMRhPO+HJ4oKln3wkBTwtDFH0ojbqm+MJm6xuDMHp2nkhh/ThaBqq20IbraBQSWKfSLNHQO9Og==
+
+css-functions-list@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/css-functions-list/-/css-functions-list-3.1.0.tgz#cf5b09f835ad91a00e5959bcfc627cd498e1321b"
+ integrity sha512-/9lCvYZaUbBGvYUgYGFJ4dcYiyqdhSjG7IPVluoV8A1ILjkF7ilmhp1OGUz8n+nmBcu0RNrQAzgD8B6FJbrt2w==
+
+css-loader@^6.7.1:
+ version "6.7.1"
+ resolved "https://registry.npmjs.org/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e"
+ integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw==
+ dependencies:
+ icss-utils "^5.1.0"
+ postcss "^8.4.7"
+ postcss-modules-extract-imports "^3.0.0"
+ postcss-modules-local-by-default "^4.0.0"
+ postcss-modules-scope "^3.0.0"
+ postcss-modules-values "^4.0.0"
+ postcss-value-parser "^4.2.0"
+ semver "^7.3.5"
+
+css-minimizer-webpack-plugin@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-4.0.0.tgz#e11800388c19c2b7442c39cc78ac8ae3675c9605"
+ integrity sha512-7ZXXRzRHvofv3Uac5Y+RkWRNo0ZMlcg8e9/OtrqUYmwDWJo+qs67GvdeFrXLsFb7czKNwjQhPkM0avlIYl+1nA==
+ dependencies:
+ cssnano "^5.1.8"
+ jest-worker "^27.5.1"
+ postcss "^8.4.13"
+ schema-utils "^4.0.0"
+ serialize-javascript "^6.0.0"
+ source-map "^0.6.1"
+
+css-select@^4.1.3:
+ version "4.3.0"
+ resolved "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b"
+ integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==
+ dependencies:
+ boolbase "^1.0.0"
+ css-what "^6.0.1"
+ domhandler "^4.3.1"
+ domutils "^2.8.0"
+ nth-check "^2.0.1"
+
+css-select@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz#b8ebd6554c3637ccc76688804ad3f6a6fdaea8a6"
+ integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==
+ dependencies:
+ boolbase "^1.0.0"
+ css-what "^6.1.0"
+ domhandler "^5.0.2"
+ domutils "^3.0.1"
+ nth-check "^2.0.1"
+
+css-tree@^1.1.2, css-tree@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d"
+ integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==
+ dependencies:
+ mdn-data "2.0.14"
+ source-map "^0.6.1"
+
+css-what@^6.0.1, css-what@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4"
+ integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==
+
+cssesc@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee"
+ integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==
+
+cssnano-preset-advanced@^5.3.8:
+ version "5.3.8"
+ resolved "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.8.tgz#027b1d05ef896d908178c483f0ec4190cb50ef9a"
+ integrity sha512-xUlLLnEB1LjpEik+zgRNlk8Y/koBPPtONZjp7JKbXigeAmCrFvq9H0pXW5jJV45bQWAlmJ0sKy+IMr0XxLYQZg==
+ dependencies:
+ autoprefixer "^10.3.7"
+ cssnano-preset-default "^5.2.12"
+ postcss-discard-unused "^5.1.0"
+ postcss-merge-idents "^5.1.1"
+ postcss-reduce-idents "^5.2.0"
+ postcss-zindex "^5.1.0"
+
+cssnano-preset-default@^5.2.12:
+ version "5.2.12"
+ resolved "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz#ebe6596ec7030e62c3eb2b3c09f533c0644a9a97"
+ integrity sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew==
+ dependencies:
+ css-declaration-sorter "^6.3.0"
+ cssnano-utils "^3.1.0"
+ postcss-calc "^8.2.3"
+ postcss-colormin "^5.3.0"
+ postcss-convert-values "^5.1.2"
+ postcss-discard-comments "^5.1.2"
+ postcss-discard-duplicates "^5.1.0"
+ postcss-discard-empty "^5.1.1"
+ postcss-discard-overridden "^5.1.0"
+ postcss-merge-longhand "^5.1.6"
+ postcss-merge-rules "^5.1.2"
+ postcss-minify-font-values "^5.1.0"
+ postcss-minify-gradients "^5.1.1"
+ postcss-minify-params "^5.1.3"
+ postcss-minify-selectors "^5.2.1"
+ postcss-normalize-charset "^5.1.0"
+ postcss-normalize-display-values "^5.1.0"
+ postcss-normalize-positions "^5.1.1"
+ postcss-normalize-repeat-style "^5.1.1"
+ postcss-normalize-string "^5.1.0"
+ postcss-normalize-timing-functions "^5.1.0"
+ postcss-normalize-unicode "^5.1.0"
+ postcss-normalize-url "^5.1.0"
+ postcss-normalize-whitespace "^5.1.1"
+ postcss-ordered-values "^5.1.3"
+ postcss-reduce-initial "^5.1.0"
+ postcss-reduce-transforms "^5.1.0"
+ postcss-svgo "^5.1.0"
+ postcss-unique-selectors "^5.1.1"
+
+cssnano-utils@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861"
+ integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA==
+
+cssnano@^5.1.12, cssnano@^5.1.8:
+ version "5.1.12"
+ resolved "https://registry.npmjs.org/cssnano/-/cssnano-5.1.12.tgz#bcd0b64d6be8692de79332c501daa7ece969816c"
+ integrity sha512-TgvArbEZu0lk/dvg2ja+B7kYoD7BBCmn3+k58xD0qjrGHsFzXY/wKTo9M5egcUCabPol05e/PVoIu79s2JN4WQ==
+ dependencies:
+ cssnano-preset-default "^5.2.12"
+ lilconfig "^2.0.3"
+ yaml "^1.10.2"
+
+csso@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529"
+ integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==
+ dependencies:
+ css-tree "^1.1.2"
+
+csstype@^3.0.2:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/csstype/-/csstype-3.1.0.tgz#4ddcac3718d787cf9df0d1b7d15033925c8f29f2"
+ integrity sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA==
+
+damerau-levenshtein@^1.0.8:
+ version "1.0.8"
+ resolved "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7"
+ integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==
+
+debug@2.6.9, debug@^2.6.0, debug@^2.6.9:
+ version "2.6.9"
+ resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+ integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
+ dependencies:
+ ms "2.0.0"
+
+debug@^3.2.7:
+ version "3.2.7"
+ resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a"
+ integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==
+ dependencies:
+ ms "^2.1.1"
+
+debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4:
+ version "4.3.4"
+ resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
+ integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
+ dependencies:
+ ms "2.1.2"
+
+decamelize-keys@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.0.tgz#d171a87933252807eb3cb61dc1c1445d078df2d9"
+ integrity sha512-ocLWuYzRPoS9bfiSdDd3cxvrzovVMZnRDVEzAs+hWIVXGDbHxWMECij2OBuyB/An0FFW/nLuq6Kv1i/YC5Qfzg==
+ dependencies:
+ decamelize "^1.1.0"
+ map-obj "^1.0.0"
+
+decamelize@^1.1.0, decamelize@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
+ integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==
+
+decompress-response@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3"
+ integrity sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==
+ dependencies:
+ mimic-response "^1.0.0"
+
+deep-extend@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
+ integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
+
+deep-is@^0.1.3:
+ version "0.1.4"
+ resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831"
+ integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==
+
+deepmerge@^4.2.2:
+ version "4.2.2"
+ resolved "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955"
+ integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==
+
+default-gateway@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71"
+ integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==
+ dependencies:
+ execa "^5.0.0"
+
+defer-to-connect@^1.0.1:
+ version "1.1.3"
+ resolved "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591"
+ integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==
+
+define-lazy-prop@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f"
+ integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==
+
+define-properties@^1.1.3, define-properties@^1.1.4:
+ version "1.1.4"
+ resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1"
+ integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==
+ dependencies:
+ has-property-descriptors "^1.0.0"
+ object-keys "^1.1.1"
+
+del@^6.1.1:
+ version "6.1.1"
+ resolved "https://registry.npmjs.org/del/-/del-6.1.1.tgz#3b70314f1ec0aa325c6b14eb36b95786671edb7a"
+ integrity sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==
+ dependencies:
+ globby "^11.0.1"
+ graceful-fs "^4.2.4"
+ is-glob "^4.0.1"
+ is-path-cwd "^2.2.0"
+ is-path-inside "^3.0.2"
+ p-map "^4.0.0"
+ rimraf "^3.0.2"
+ slash "^3.0.0"
+
+depd@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
+ integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==
+
+depd@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
+ integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==
+
+destroy@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015"
+ integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==
+
+detab@2.0.4:
+ version "2.0.4"
+ resolved "https://registry.npmjs.org/detab/-/detab-2.0.4.tgz#b927892069aff405fbb9a186fe97a44a92a94b43"
+ integrity sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g==
+ dependencies:
+ repeat-string "^1.5.4"
+
+detect-node@^2.0.4:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1"
+ integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==
+
+detect-port-alt@^1.1.6:
+ version "1.1.6"
+ resolved "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275"
+ integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==
+ dependencies:
+ address "^1.0.1"
+ debug "^2.6.0"
+
+detect-port@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/detect-port/-/detect-port-1.3.0.tgz#d9c40e9accadd4df5cac6a782aefd014d573d1f1"
+ integrity sha512-E+B1gzkl2gqxt1IhUzwjrxBKRqx1UzC3WLONHinn8S3T6lwV/agVCyitiFOsGJ/eYuEUBvD71MZHy3Pv1G9doQ==
+ dependencies:
+ address "^1.0.1"
+ debug "^2.6.0"
+
+dir-glob@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f"
+ integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==
+ dependencies:
+ path-type "^4.0.0"
+
+dns-equal@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d"
+ integrity sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg==
+
+dns-packet@^5.2.2:
+ version "5.4.0"
+ resolved "https://registry.npmjs.org/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b"
+ integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g==
+ dependencies:
+ "@leichtgewicht/ip-codec" "^2.0.1"
+
+doctrine@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d"
+ integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==
+ dependencies:
+ esutils "^2.0.2"
+
+doctrine@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961"
+ integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==
+ dependencies:
+ esutils "^2.0.2"
+
+dom-converter@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768"
+ integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==
+ dependencies:
+ utila "~0.4"
+
+dom-serializer@^1.0.1:
+ version "1.4.1"
+ resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30"
+ integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==
+ dependencies:
+ domelementtype "^2.0.1"
+ domhandler "^4.2.0"
+ entities "^2.0.0"
+
+dom-serializer@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz#e41b802e1eedf9f6cae183ce5e622d789d7d8e53"
+ integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==
+ dependencies:
+ domelementtype "^2.3.0"
+ domhandler "^5.0.2"
+ entities "^4.2.0"
+
+domelementtype@^2.0.1, domelementtype@^2.2.0, domelementtype@^2.3.0:
+ version "2.3.0"
+ resolved "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d"
+ integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==
+
+domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1:
+ version "4.3.1"
+ resolved "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c"
+ integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==
+ dependencies:
+ domelementtype "^2.2.0"
+
+domhandler@^5.0.1, domhandler@^5.0.2, domhandler@^5.0.3:
+ version "5.0.3"
+ resolved "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31"
+ integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==
+ dependencies:
+ domelementtype "^2.3.0"
+
+domutils@^2.5.2, domutils@^2.8.0:
+ version "2.8.0"
+ resolved "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135"
+ integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==
+ dependencies:
+ dom-serializer "^1.0.1"
+ domelementtype "^2.2.0"
+ domhandler "^4.2.0"
+
+domutils@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz#696b3875238338cb186b6c0612bd4901c89a4f1c"
+ integrity sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==
+ dependencies:
+ dom-serializer "^2.0.0"
+ domelementtype "^2.3.0"
+ domhandler "^5.0.1"
+
+dot-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751"
+ integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+
+dot-prop@^5.2.0:
+ version "5.3.0"
+ resolved "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88"
+ integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==
+ dependencies:
+ is-obj "^2.0.0"
+
+duplexer3@^0.1.4:
+ version "0.1.5"
+ resolved "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e"
+ integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==
+
+duplexer@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6"
+ integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==
+
+eastasianwidth@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb"
+ integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==
+
+ee-first@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
+ integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==
+
+electron-to-chromium@^1.4.188:
+ version "1.4.196"
+ resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.196.tgz#e18cdc5c1c2c2ebf78da237d0c374cc3b244d4cb"
+ integrity sha512-uxMa/Dt7PQsLBVXwH+t6JvpHJnrsYBaxWKi/J6HE+/nBtoHENhwBoNkgkm226/Kfxeg0z1eMQLBRPPKcDH8xWA==
+
+emoji-regex@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
+ integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==
+
+emoji-regex@^9.2.2:
+ version "9.2.2"
+ resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72"
+ integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==
+
+emojis-list@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78"
+ integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==
+
+emoticon@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.npmjs.org/emoticon/-/emoticon-3.2.0.tgz#c008ca7d7620fac742fe1bf4af8ff8fed154ae7f"
+ integrity sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg==
+
+encodeurl@~1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
+ integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==
+
+end-of-stream@^1.1.0:
+ version "1.4.4"
+ resolved "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
+ integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
+ dependencies:
+ once "^1.4.0"
+
+enhanced-resolve@^5.9.3:
+ version "5.10.0"
+ resolved "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6"
+ integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==
+ dependencies:
+ graceful-fs "^4.2.4"
+ tapable "^2.2.0"
+
+entities@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55"
+ integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==
+
+entities@^4.2.0, entities@^4.3.0:
+ version "4.3.1"
+ resolved "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz#c34062a94c865c322f9d67b4384e4169bcede6a4"
+ integrity sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==
+
+error-ex@^1.3.1:
+ version "1.3.2"
+ resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
+ integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
+ dependencies:
+ is-arrayish "^0.2.1"
+
+es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5:
+ version "1.20.1"
+ resolved "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814"
+ integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA==
+ dependencies:
+ call-bind "^1.0.2"
+ es-to-primitive "^1.2.1"
+ function-bind "^1.1.1"
+ function.prototype.name "^1.1.5"
+ get-intrinsic "^1.1.1"
+ get-symbol-description "^1.0.0"
+ has "^1.0.3"
+ has-property-descriptors "^1.0.0"
+ has-symbols "^1.0.3"
+ internal-slot "^1.0.3"
+ is-callable "^1.2.4"
+ is-negative-zero "^2.0.2"
+ is-regex "^1.1.4"
+ is-shared-array-buffer "^1.0.2"
+ is-string "^1.0.7"
+ is-weakref "^1.0.2"
+ object-inspect "^1.12.0"
+ object-keys "^1.1.1"
+ object.assign "^4.1.2"
+ regexp.prototype.flags "^1.4.3"
+ string.prototype.trimend "^1.0.5"
+ string.prototype.trimstart "^1.0.5"
+ unbox-primitive "^1.0.2"
+
+es-module-lexer@^0.9.0:
+ version "0.9.3"
+ resolved "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19"
+ integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==
+
+es-shim-unscopables@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241"
+ integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==
+ dependencies:
+ has "^1.0.3"
+
+es-to-primitive@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a"
+ integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==
+ dependencies:
+ is-callable "^1.1.4"
+ is-date-object "^1.0.1"
+ is-symbol "^1.0.2"
+
+escalade@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
+ integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
+
+escape-goat@^2.0.0:
+ version "2.1.1"
+ resolved "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675"
+ integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==
+
+escape-html@^1.0.3, escape-html@~1.0.3:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
+ integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==
+
+escape-string-regexp@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
+ integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
+
+escape-string-regexp@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34"
+ integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==
+
+eslint-config-airbnb-base@^15.0.0:
+ version "15.0.0"
+ resolved "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz#6b09add90ac79c2f8d723a2580e07f3925afd236"
+ integrity sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==
+ dependencies:
+ confusing-browser-globals "^1.0.10"
+ object.assign "^4.1.2"
+ object.entries "^1.1.5"
+ semver "^6.3.0"
+
+eslint-config-airbnb@^19.0.4:
+ version "19.0.4"
+ resolved "https://registry.npmjs.org/eslint-config-airbnb/-/eslint-config-airbnb-19.0.4.tgz#84d4c3490ad70a0ffa571138ebcdea6ab085fdc3"
+ integrity sha512-T75QYQVQX57jiNgpF9r1KegMICE94VYwoFQyMGhrvc+lB8YF2E/M/PYDaQe1AJcWaEgqLE+ErXV1Og/+6Vyzew==
+ dependencies:
+ eslint-config-airbnb-base "^15.0.0"
+ object.assign "^4.1.2"
+ object.entries "^1.1.5"
+
+eslint-config-prettier@^8.5.0:
+ version "8.5.0"
+ resolved "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz#5a81680ec934beca02c7b1a61cf8ca34b66feab1"
+ integrity sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==
+
+eslint-import-resolver-node@^0.3.6:
+ version "0.3.6"
+ resolved "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd"
+ integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==
+ dependencies:
+ debug "^3.2.7"
+ resolve "^1.20.0"
+
+eslint-module-utils@^2.7.3:
+ version "2.7.3"
+ resolved "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz#ad7e3a10552fdd0642e1e55292781bd6e34876ee"
+ integrity sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ==
+ dependencies:
+ debug "^3.2.7"
+ find-up "^2.1.0"
+
+eslint-plugin-header@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/eslint-plugin-header/-/eslint-plugin-header-3.1.1.tgz#6ce512432d57675265fac47292b50d1eff11acd6"
+ integrity sha512-9vlKxuJ4qf793CmeeSrZUvVClw6amtpghq3CuWcB5cUNnWHQhgcqy5eF8oVKFk1G3Y/CbchGfEaw3wiIJaNmVg==
+
+eslint-plugin-import@^2.26.0:
+ version "2.26.0"
+ resolved "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b"
+ integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==
+ dependencies:
+ array-includes "^3.1.4"
+ array.prototype.flat "^1.2.5"
+ debug "^2.6.9"
+ doctrine "^2.1.0"
+ eslint-import-resolver-node "^0.3.6"
+ eslint-module-utils "^2.7.3"
+ has "^1.0.3"
+ is-core-module "^2.8.1"
+ is-glob "^4.0.3"
+ minimatch "^3.1.2"
+ object.values "^1.1.5"
+ resolve "^1.22.0"
+ tsconfig-paths "^3.14.1"
+
+eslint-plugin-jsx-a11y@^6.6.0:
+ version "6.6.0"
+ resolved "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.0.tgz#2c5ac12e013eb98337b9aa261c3b355275cc6415"
+ integrity sha512-kTeLuIzpNhXL2CwLlc8AHI0aFRwWHcg483yepO9VQiHzM9bZwJdzTkzBszbuPrbgGmq2rlX/FaT2fJQsjUSHsw==
+ dependencies:
+ "@babel/runtime" "^7.18.3"
+ aria-query "^4.2.2"
+ array-includes "^3.1.5"
+ ast-types-flow "^0.0.7"
+ axe-core "^4.4.2"
+ axobject-query "^2.2.0"
+ damerau-levenshtein "^1.0.8"
+ emoji-regex "^9.2.2"
+ has "^1.0.3"
+ jsx-ast-utils "^3.3.1"
+ language-tags "^1.0.5"
+ minimatch "^3.1.2"
+ semver "^6.3.0"
+
+eslint-plugin-react-hooks@^4.6.0:
+ version "4.6.0"
+ resolved "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3"
+ integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==
+
+eslint-plugin-react@^7.30.1:
+ version "7.30.1"
+ resolved "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.30.1.tgz#2be4ab23ce09b5949c6631413ba64b2810fd3e22"
+ integrity sha512-NbEvI9jtqO46yJA3wcRF9Mo0lF9T/jhdHqhCHXiXtD+Zcb98812wvokjWpU7Q4QH5edo6dmqrukxVvWWXHlsUg==
+ dependencies:
+ array-includes "^3.1.5"
+ array.prototype.flatmap "^1.3.0"
+ doctrine "^2.1.0"
+ estraverse "^5.3.0"
+ jsx-ast-utils "^2.4.1 || ^3.0.0"
+ minimatch "^3.1.2"
+ object.entries "^1.1.5"
+ object.fromentries "^2.0.5"
+ object.hasown "^1.1.1"
+ object.values "^1.1.5"
+ prop-types "^15.8.1"
+ resolve "^2.0.0-next.3"
+ semver "^6.3.0"
+ string.prototype.matchall "^4.0.7"
+
+eslint-scope@5.1.1, eslint-scope@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c"
+ integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==
+ dependencies:
+ esrecurse "^4.3.0"
+ estraverse "^4.1.1"
+
+eslint-scope@^7.1.1:
+ version "7.1.1"
+ resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642"
+ integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==
+ dependencies:
+ esrecurse "^4.3.0"
+ estraverse "^5.2.0"
+
+eslint-utils@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672"
+ integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==
+ dependencies:
+ eslint-visitor-keys "^2.0.0"
+
+eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303"
+ integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==
+
+eslint-visitor-keys@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826"
+ integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==
+
+eslint@^8.19.0:
+ version "8.20.0"
+ resolved "https://registry.npmjs.org/eslint/-/eslint-8.20.0.tgz#048ac56aa18529967da8354a478be4ec0a2bc81b"
+ integrity sha512-d4ixhz5SKCa1D6SCPrivP7yYVi7nyD6A4vs6HIAul9ujBzcEmZVM3/0NN/yu5nKhmO1wjp5xQ46iRfmDGlOviA==
+ dependencies:
+ "@eslint/eslintrc" "^1.3.0"
+ "@humanwhocodes/config-array" "^0.9.2"
+ ajv "^6.10.0"
+ chalk "^4.0.0"
+ cross-spawn "^7.0.2"
+ debug "^4.3.2"
+ doctrine "^3.0.0"
+ escape-string-regexp "^4.0.0"
+ eslint-scope "^7.1.1"
+ eslint-utils "^3.0.0"
+ eslint-visitor-keys "^3.3.0"
+ espree "^9.3.2"
+ esquery "^1.4.0"
+ esutils "^2.0.2"
+ fast-deep-equal "^3.1.3"
+ file-entry-cache "^6.0.1"
+ functional-red-black-tree "^1.0.1"
+ glob-parent "^6.0.1"
+ globals "^13.15.0"
+ ignore "^5.2.0"
+ import-fresh "^3.0.0"
+ imurmurhash "^0.1.4"
+ is-glob "^4.0.0"
+ js-yaml "^4.1.0"
+ json-stable-stringify-without-jsonify "^1.0.1"
+ levn "^0.4.1"
+ lodash.merge "^4.6.2"
+ minimatch "^3.1.2"
+ natural-compare "^1.4.0"
+ optionator "^0.9.1"
+ regexpp "^3.2.0"
+ strip-ansi "^6.0.1"
+ strip-json-comments "^3.1.0"
+ text-table "^0.2.0"
+ v8-compile-cache "^2.0.3"
+
+espree@^9.3.2:
+ version "9.3.2"
+ resolved "https://registry.npmjs.org/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596"
+ integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA==
+ dependencies:
+ acorn "^8.7.1"
+ acorn-jsx "^5.3.2"
+ eslint-visitor-keys "^3.3.0"
+
+esprima@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
+ integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
+
+esquery@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5"
+ integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==
+ dependencies:
+ estraverse "^5.1.0"
+
+esrecurse@^4.3.0:
+ version "4.3.0"
+ resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921"
+ integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==
+ dependencies:
+ estraverse "^5.2.0"
+
+estraverse@^4.1.1:
+ version "4.3.0"
+ resolved "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
+ integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
+
+estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0:
+ version "5.3.0"
+ resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123"
+ integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==
+
+esutils@^2.0.2:
+ version "2.0.3"
+ resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
+ integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
+
+eta@^1.12.3:
+ version "1.12.3"
+ resolved "https://registry.npmjs.org/eta/-/eta-1.12.3.tgz#2982d08adfbef39f9fa50e2fbd42d7337e7338b1"
+ integrity sha512-qHixwbDLtekO/d51Yr4glcaUJCIjGVJyTzuqV4GPlgZo1YpgOKG+avQynErZIYrfM6JIJdtiG2Kox8tbb+DoGg==
+
+etag@~1.8.1:
+ version "1.8.1"
+ resolved "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
+ integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==
+
+eval@^0.1.8:
+ version "0.1.8"
+ resolved "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz#2b903473b8cc1d1989b83a1e7923f883eb357f85"
+ integrity sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==
+ dependencies:
+ "@types/node" "*"
+ require-like ">= 0.1.1"
+
+eventemitter3@^4.0.0:
+ version "4.0.7"
+ resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f"
+ integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==
+
+events@^3.2.0:
+ version "3.3.0"
+ resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
+ integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
+
+execa@^5.0.0:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd"
+ integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==
+ dependencies:
+ cross-spawn "^7.0.3"
+ get-stream "^6.0.0"
+ human-signals "^2.1.0"
+ is-stream "^2.0.0"
+ merge-stream "^2.0.0"
+ npm-run-path "^4.0.1"
+ onetime "^5.1.2"
+ signal-exit "^3.0.3"
+ strip-final-newline "^2.0.0"
+
+execall@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/execall/-/execall-2.0.0.tgz#16a06b5fe5099df7d00be5d9c06eecded1663b45"
+ integrity sha512-0FU2hZ5Hh6iQnarpRtQurM/aAvp3RIbfvgLHrcqJYzhXyV2KFruhuChf9NC6waAhiUR7FFtlugkI4p7f2Fqlow==
+ dependencies:
+ clone-regexp "^2.1.0"
+
+express@^4.17.3:
+ version "4.18.1"
+ resolved "https://registry.npmjs.org/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf"
+ integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==
+ dependencies:
+ accepts "~1.3.8"
+ array-flatten "1.1.1"
+ body-parser "1.20.0"
+ content-disposition "0.5.4"
+ content-type "~1.0.4"
+ cookie "0.5.0"
+ cookie-signature "1.0.6"
+ debug "2.6.9"
+ depd "2.0.0"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ finalhandler "1.2.0"
+ fresh "0.5.2"
+ http-errors "2.0.0"
+ merge-descriptors "1.0.1"
+ methods "~1.1.2"
+ on-finished "2.4.1"
+ parseurl "~1.3.3"
+ path-to-regexp "0.1.7"
+ proxy-addr "~2.0.7"
+ qs "6.10.3"
+ range-parser "~1.2.1"
+ safe-buffer "5.2.1"
+ send "0.18.0"
+ serve-static "1.15.0"
+ setprototypeof "1.2.0"
+ statuses "2.0.1"
+ type-is "~1.6.18"
+ utils-merge "1.0.1"
+ vary "~1.1.2"
+
+extend-shallow@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f"
+ integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==
+ dependencies:
+ is-extendable "^0.1.0"
+
+extend@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
+ integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
+
+fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
+ version "3.1.3"
+ resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
+ integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
+
+fast-glob@^3.2.11, fast-glob@^3.2.9:
+ version "3.2.11"
+ resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9"
+ integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==
+ dependencies:
+ "@nodelib/fs.stat" "^2.0.2"
+ "@nodelib/fs.walk" "^1.2.3"
+ glob-parent "^5.1.2"
+ merge2 "^1.3.0"
+ micromatch "^4.0.4"
+
+fast-json-stable-stringify@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
+ integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
+
+fast-levenshtein@^2.0.6:
+ version "2.0.6"
+ resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
+ integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
+
+fast-url-parser@1.1.3:
+ version "1.1.3"
+ resolved "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d"
+ integrity sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==
+ dependencies:
+ punycode "^1.3.2"
+
+fastest-levenshtein@^1.0.12:
+ version "1.0.14"
+ resolved "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.14.tgz#9054384e4b7a78c88d01a4432dc18871af0ac859"
+ integrity sha512-tFfWHjnuUfKE186Tfgr+jtaFc0mZTApEgKDOeyN+FwOqRkO/zK/3h1AiRd8u8CY53owL3CUmGr/oI9p/RdyLTA==
+
+fastq@^1.6.0:
+ version "1.13.0"
+ resolved "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c"
+ integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==
+ dependencies:
+ reusify "^1.0.4"
+
+faye-websocket@^0.11.3:
+ version "0.11.4"
+ resolved "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da"
+ integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==
+ dependencies:
+ websocket-driver ">=0.5.1"
+
+fbemitter@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/fbemitter/-/fbemitter-3.0.0.tgz#00b2a1af5411254aab416cd75f9e6289bee4bff3"
+ integrity sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw==
+ dependencies:
+ fbjs "^3.0.0"
+
+fbjs-css-vars@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz#216551136ae02fe255932c3ec8775f18e2c078b8"
+ integrity sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==
+
+fbjs@^3.0.0, fbjs@^3.0.1:
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/fbjs/-/fbjs-3.0.4.tgz#e1871c6bd3083bac71ff2da868ad5067d37716c6"
+ integrity sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ==
+ dependencies:
+ cross-fetch "^3.1.5"
+ fbjs-css-vars "^1.0.0"
+ loose-envify "^1.0.0"
+ object-assign "^4.1.0"
+ promise "^7.1.1"
+ setimmediate "^1.0.5"
+ ua-parser-js "^0.7.30"
+
+feed@^4.2.2:
+ version "4.2.2"
+ resolved "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz#865783ef6ed12579e2c44bbef3c9113bc4956a7e"
+ integrity sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==
+ dependencies:
+ xml-js "^1.6.11"
+
+file-entry-cache@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027"
+ integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==
+ dependencies:
+ flat-cache "^3.0.4"
+
+file-loader@^6.2.0:
+ version "6.2.0"
+ resolved "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d"
+ integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==
+ dependencies:
+ loader-utils "^2.0.0"
+ schema-utils "^3.0.0"
+
+filesize@^8.0.6:
+ version "8.0.7"
+ resolved "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8"
+ integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ==
+
+fill-range@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
+ integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
+ dependencies:
+ to-regex-range "^5.0.1"
+
+finalhandler@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32"
+ integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==
+ dependencies:
+ debug "2.6.9"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ on-finished "2.4.1"
+ parseurl "~1.3.3"
+ statuses "2.0.1"
+ unpipe "~1.0.0"
+
+find-cache-dir@^3.3.1:
+ version "3.3.2"
+ resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b"
+ integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==
+ dependencies:
+ commondir "^1.0.1"
+ make-dir "^3.0.2"
+ pkg-dir "^4.1.0"
+
+find-up@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
+ integrity sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==
+ dependencies:
+ locate-path "^2.0.0"
+
+find-up@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
+ integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
+ dependencies:
+ locate-path "^3.0.0"
+
+find-up@^4.0.0, find-up@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
+ integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
+ dependencies:
+ locate-path "^5.0.0"
+ path-exists "^4.0.0"
+
+find-up@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc"
+ integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==
+ dependencies:
+ locate-path "^6.0.0"
+ path-exists "^4.0.0"
+
+flat-cache@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11"
+ integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==
+ dependencies:
+ flatted "^3.1.0"
+ rimraf "^3.0.2"
+
+flatted@^3.1.0:
+ version "3.2.6"
+ resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.6.tgz#022e9218c637f9f3fc9c35ab9c9193f05add60b2"
+ integrity sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ==
+
+flux@^4.0.1:
+ version "4.0.3"
+ resolved "https://registry.npmjs.org/flux/-/flux-4.0.3.tgz#573b504a24982c4768fdfb59d8d2ea5637d72ee7"
+ integrity sha512-yKAbrp7JhZhj6uiT1FTuVMlIAT1J4jqEyBpFApi1kxpGZCvacMVc/t1pMQyotqHhAgvoE3bNvAykhCo2CLjnYw==
+ dependencies:
+ fbemitter "^3.0.0"
+ fbjs "^3.0.1"
+
+follow-redirects@^1.0.0, follow-redirects@^1.14.7:
+ version "1.15.1"
+ resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5"
+ integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==
+
+fork-ts-checker-webpack-plugin@^6.5.0:
+ version "6.5.2"
+ resolved "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340"
+ integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA==
+ dependencies:
+ "@babel/code-frame" "^7.8.3"
+ "@types/json-schema" "^7.0.5"
+ chalk "^4.1.0"
+ chokidar "^3.4.2"
+ cosmiconfig "^6.0.0"
+ deepmerge "^4.2.2"
+ fs-extra "^9.0.0"
+ glob "^7.1.6"
+ memfs "^3.1.2"
+ minimatch "^3.0.4"
+ schema-utils "2.7.0"
+ semver "^7.3.2"
+ tapable "^1.0.0"
+
+forwarded@0.2.0:
+ version "0.2.0"
+ resolved "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811"
+ integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==
+
+fraction.js@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950"
+ integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==
+
+fresh@0.5.2:
+ version "0.5.2"
+ resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
+ integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==
+
+fs-extra@^10.1.0:
+ version "10.1.0"
+ resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf"
+ integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==
+ dependencies:
+ graceful-fs "^4.2.0"
+ jsonfile "^6.0.1"
+ universalify "^2.0.0"
+
+fs-extra@^9.0.0:
+ version "9.1.0"
+ resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d"
+ integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==
+ dependencies:
+ at-least-node "^1.0.0"
+ graceful-fs "^4.2.0"
+ jsonfile "^6.0.1"
+ universalify "^2.0.0"
+
+fs-monkey@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3"
+ integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q==
+
+fs.realpath@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
+ integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==
+
+fsevents@~2.3.2:
+ version "2.3.2"
+ resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a"
+ integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==
+
+function-bind@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
+ integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
+
+function.prototype.name@^1.1.5:
+ version "1.1.5"
+ resolved "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621"
+ integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.19.0"
+ functions-have-names "^1.2.2"
+
+functional-red-black-tree@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
+ integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==
+
+functions-have-names@^1.2.2:
+ version "1.2.3"
+ resolved "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834"
+ integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==
+
+gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2:
+ version "1.0.0-beta.2"
+ resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0"
+ integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==
+
+get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1:
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.2.tgz#336975123e05ad0b7ba41f152ee4aadbea6cf598"
+ integrity sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==
+ dependencies:
+ function-bind "^1.1.1"
+ has "^1.0.3"
+ has-symbols "^1.0.3"
+
+get-own-enumerable-property-symbols@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
+ integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==
+
+get-stdin@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.npmjs.org/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53"
+ integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==
+
+get-stream@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
+ integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
+ dependencies:
+ pump "^3.0.0"
+
+get-stream@^5.1.0:
+ version "5.2.0"
+ resolved "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3"
+ integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==
+ dependencies:
+ pump "^3.0.0"
+
+get-stream@^6.0.0:
+ version "6.0.1"
+ resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7"
+ integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==
+
+get-symbol-description@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6"
+ integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==
+ dependencies:
+ call-bind "^1.0.2"
+ get-intrinsic "^1.1.1"
+
+github-slugger@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.npmjs.org/github-slugger/-/github-slugger-1.4.0.tgz#206eb96cdb22ee56fdc53a28d5a302338463444e"
+ integrity sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ==
+
+glob-parent@^5.1.2, glob-parent@~5.1.2:
+ version "5.1.2"
+ resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
+ integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
+ dependencies:
+ is-glob "^4.0.1"
+
+glob-parent@^6.0.1:
+ version "6.0.2"
+ resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3"
+ integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==
+ dependencies:
+ is-glob "^4.0.3"
+
+glob-to-regexp@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e"
+ integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==
+
+glob@^7.0.0, glob@^7.1.3, glob@^7.1.6:
+ version "7.2.3"
+ resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b"
+ integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==
+ dependencies:
+ fs.realpath "^1.0.0"
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "^3.1.1"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
+global-dirs@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz#70a76fe84ea315ab37b1f5576cbde7d48ef72686"
+ integrity sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==
+ dependencies:
+ ini "2.0.0"
+
+global-modules@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780"
+ integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==
+ dependencies:
+ global-prefix "^3.0.0"
+
+global-prefix@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97"
+ integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==
+ dependencies:
+ ini "^1.3.5"
+ kind-of "^6.0.2"
+ which "^1.3.1"
+
+globals@^11.1.0:
+ version "11.12.0"
+ resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
+ integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==
+
+globals@^13.15.0:
+ version "13.16.0"
+ resolved "https://registry.npmjs.org/globals/-/globals-13.16.0.tgz#9be4aca28f311aaeb974ea54978ebbb5e35ce46a"
+ integrity sha512-A1lrQfpNF+McdPOnnFqY3kSN0AFTy485bTi1bkLk4mVPODIUEcSfhHgRqA+QdXPksrSTTztYXx37NFV+GpGk3Q==
+ dependencies:
+ type-fest "^0.20.2"
+
+globby@^11.0.1, globby@^11.0.4, globby@^11.1.0:
+ version "11.1.0"
+ resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b"
+ integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==
+ dependencies:
+ array-union "^2.1.0"
+ dir-glob "^3.0.1"
+ fast-glob "^3.2.9"
+ ignore "^5.2.0"
+ merge2 "^1.4.1"
+ slash "^3.0.0"
+
+globby@^13.1.1:
+ version "13.1.2"
+ resolved "https://registry.npmjs.org/globby/-/globby-13.1.2.tgz#29047105582427ab6eca4f905200667b056da515"
+ integrity sha512-LKSDZXToac40u8Q1PQtZihbNdTYSNMuWe+K5l+oa6KgDzSvVrHXlJy40hUP522RjAIoNLJYBJi7ow+rbFpIhHQ==
+ dependencies:
+ dir-glob "^3.0.1"
+ fast-glob "^3.2.11"
+ ignore "^5.2.0"
+ merge2 "^1.4.1"
+ slash "^4.0.0"
+
+globjoin@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.npmjs.org/globjoin/-/globjoin-0.1.4.tgz#2f4494ac8919e3767c5cbb691e9f463324285d43"
+ integrity sha512-xYfnw62CKG8nLkZBfWbhWwDw02CHty86jfPcc2cr3ZfeuK9ysoVPPEUxf21bAD/rWAgk52SuBrLJlefNy8mvFg==
+
+got@^9.6.0:
+ version "9.6.0"
+ resolved "https://registry.npmjs.org/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85"
+ integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==
+ dependencies:
+ "@sindresorhus/is" "^0.14.0"
+ "@szmarczak/http-timer" "^1.1.2"
+ cacheable-request "^6.0.0"
+ decompress-response "^3.3.0"
+ duplexer3 "^0.1.4"
+ get-stream "^4.1.0"
+ lowercase-keys "^1.0.1"
+ mimic-response "^1.0.1"
+ p-cancelable "^1.0.0"
+ to-readable-stream "^1.0.0"
+ url-parse-lax "^3.0.0"
+
+graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9:
+ version "4.2.10"
+ resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c"
+ integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==
+
+gray-matter@^4.0.3:
+ version "4.0.3"
+ resolved "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz#e893c064825de73ea1f5f7d88c7a9f7274288798"
+ integrity sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==
+ dependencies:
+ js-yaml "^3.13.1"
+ kind-of "^6.0.2"
+ section-matter "^1.0.0"
+ strip-bom-string "^1.0.0"
+
+gzip-size@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462"
+ integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==
+ dependencies:
+ duplexer "^0.1.2"
+
+handle-thing@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e"
+ integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==
+
+hard-rejection@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883"
+ integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==
+
+has-bigints@^1.0.1, has-bigints@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa"
+ integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==
+
+has-flag@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
+ integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==
+
+has-flag@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
+ integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
+
+has-property-descriptors@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861"
+ integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==
+ dependencies:
+ get-intrinsic "^1.1.1"
+
+has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8"
+ integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==
+
+has-tostringtag@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25"
+ integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==
+ dependencies:
+ has-symbols "^1.0.2"
+
+has-yarn@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77"
+ integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==
+
+has@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
+ integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
+ dependencies:
+ function-bind "^1.1.1"
+
+hast-to-hyperscript@^9.0.0:
+ version "9.0.1"
+ resolved "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d"
+ integrity sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==
+ dependencies:
+ "@types/unist" "^2.0.3"
+ comma-separated-tokens "^1.0.0"
+ property-information "^5.3.0"
+ space-separated-tokens "^1.0.0"
+ style-to-object "^0.3.0"
+ unist-util-is "^4.0.0"
+ web-namespaces "^1.0.0"
+
+hast-util-from-parse5@^6.0.0:
+ version "6.0.1"
+ resolved "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz#554e34abdeea25ac76f5bd950a1f0180e0b3bc2a"
+ integrity sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==
+ dependencies:
+ "@types/parse5" "^5.0.0"
+ hastscript "^6.0.0"
+ property-information "^5.0.0"
+ vfile "^4.0.0"
+ vfile-location "^3.2.0"
+ web-namespaces "^1.0.0"
+
+hast-util-parse-selector@^2.0.0:
+ version "2.2.5"
+ resolved "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz#d57c23f4da16ae3c63b3b6ca4616683313499c3a"
+ integrity sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==
+
+hast-util-raw@6.0.1:
+ version "6.0.1"
+ resolved "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.0.1.tgz#973b15930b7529a7b66984c98148b46526885977"
+ integrity sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig==
+ dependencies:
+ "@types/hast" "^2.0.0"
+ hast-util-from-parse5 "^6.0.0"
+ hast-util-to-parse5 "^6.0.0"
+ html-void-elements "^1.0.0"
+ parse5 "^6.0.0"
+ unist-util-position "^3.0.0"
+ vfile "^4.0.0"
+ web-namespaces "^1.0.0"
+ xtend "^4.0.0"
+ zwitch "^1.0.0"
+
+hast-util-to-parse5@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz#1ec44650b631d72952066cea9b1445df699f8479"
+ integrity sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==
+ dependencies:
+ hast-to-hyperscript "^9.0.0"
+ property-information "^5.0.0"
+ web-namespaces "^1.0.0"
+ xtend "^4.0.0"
+ zwitch "^1.0.0"
+
+hastscript@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz#e8768d7eac56c3fdeac8a92830d58e811e5bf640"
+ integrity sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==
+ dependencies:
+ "@types/hast" "^2.0.0"
+ comma-separated-tokens "^1.0.0"
+ hast-util-parse-selector "^2.0.0"
+ property-information "^5.0.0"
+ space-separated-tokens "^1.0.0"
+
+he@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
+ integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
+
+history@^4.9.0:
+ version "4.10.1"
+ resolved "https://registry.npmjs.org/history/-/history-4.10.1.tgz#33371a65e3a83b267434e2b3f3b1b4c58aad4cf3"
+ integrity sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==
+ dependencies:
+ "@babel/runtime" "^7.1.2"
+ loose-envify "^1.2.0"
+ resolve-pathname "^3.0.0"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+ value-equal "^1.0.1"
+
+hoist-non-react-statics@^3.1.0:
+ version "3.3.2"
+ resolved "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45"
+ integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==
+ dependencies:
+ react-is "^16.7.0"
+
+hosted-git-info@^2.1.4:
+ version "2.8.9"
+ resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9"
+ integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==
+
+hosted-git-info@^4.0.1:
+ version "4.1.0"
+ resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224"
+ integrity sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==
+ dependencies:
+ lru-cache "^6.0.0"
+
+hpack.js@^2.1.6:
+ version "2.1.6"
+ resolved "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2"
+ integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==
+ dependencies:
+ inherits "^2.0.1"
+ obuf "^1.0.0"
+ readable-stream "^2.0.1"
+ wbuf "^1.1.0"
+
+html-entities@^2.3.2:
+ version "2.3.3"
+ resolved "https://registry.npmjs.org/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46"
+ integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA==
+
+html-minifier-terser@^6.0.2, html-minifier-terser@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab"
+ integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==
+ dependencies:
+ camel-case "^4.1.2"
+ clean-css "^5.2.2"
+ commander "^8.3.0"
+ he "^1.2.0"
+ param-case "^3.0.4"
+ relateurl "^0.2.7"
+ terser "^5.10.0"
+
+html-tags@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.npmjs.org/html-tags/-/html-tags-3.2.0.tgz#dbb3518d20b726524e4dd43de397eb0a95726961"
+ integrity sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg==
+
+html-void-elements@^1.0.0:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz#ce9159494e86d95e45795b166c2021c2cfca4483"
+ integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==
+
+html-webpack-plugin@^5.5.0:
+ version "5.5.0"
+ resolved "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50"
+ integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw==
+ dependencies:
+ "@types/html-minifier-terser" "^6.0.0"
+ html-minifier-terser "^6.0.2"
+ lodash "^4.17.21"
+ pretty-error "^4.0.0"
+ tapable "^2.0.0"
+
+htmlparser2@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7"
+ integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==
+ dependencies:
+ domelementtype "^2.0.1"
+ domhandler "^4.0.0"
+ domutils "^2.5.2"
+ entities "^2.0.0"
+
+htmlparser2@^8.0.1:
+ version "8.0.1"
+ resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz#abaa985474fcefe269bc761a779b544d7196d010"
+ integrity sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==
+ dependencies:
+ domelementtype "^2.3.0"
+ domhandler "^5.0.2"
+ domutils "^3.0.1"
+ entities "^4.3.0"
+
+http-cache-semantics@^4.0.0:
+ version "4.1.0"
+ resolved "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390"
+ integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==
+
+http-deceiver@^1.2.7:
+ version "1.2.7"
+ resolved "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87"
+ integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==
+
+http-errors@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3"
+ integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==
+ dependencies:
+ depd "2.0.0"
+ inherits "2.0.4"
+ setprototypeof "1.2.0"
+ statuses "2.0.1"
+ toidentifier "1.0.1"
+
+http-errors@~1.6.2:
+ version "1.6.3"
+ resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d"
+ integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==
+ dependencies:
+ depd "~1.1.2"
+ inherits "2.0.3"
+ setprototypeof "1.1.0"
+ statuses ">= 1.4.0 < 2"
+
+http-parser-js@>=0.5.1:
+ version "0.5.8"
+ resolved "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3"
+ integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==
+
+http-proxy-middleware@^2.0.3:
+ version "2.0.6"
+ resolved "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f"
+ integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==
+ dependencies:
+ "@types/http-proxy" "^1.17.8"
+ http-proxy "^1.18.1"
+ is-glob "^4.0.1"
+ is-plain-obj "^3.0.0"
+ micromatch "^4.0.2"
+
+http-proxy@^1.18.1:
+ version "1.18.1"
+ resolved "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549"
+ integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==
+ dependencies:
+ eventemitter3 "^4.0.0"
+ follow-redirects "^1.0.0"
+ requires-port "^1.0.0"
+
+human-signals@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
+ integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==
+
+iconv-lite@0.4.24:
+ version "0.4.24"
+ resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
+ integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
+ dependencies:
+ safer-buffer ">= 2.1.2 < 3"
+
+icss-utils@^5.0.0, icss-utils@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae"
+ integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==
+
+ignore@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a"
+ integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==
+
+image-size@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/image-size/-/image-size-1.0.2.tgz#d778b6d0ab75b2737c1556dd631652eb963bc486"
+ integrity sha512-xfOoWjceHntRb3qFCrh5ZFORYH8XCdYpASltMhZ/Q0KZiOwjdE/Yl2QCiWdwD+lygV5bMCvauzgu5PxBX/Yerg==
+ dependencies:
+ queue "6.0.2"
+
+immer@^9.0.7:
+ version "9.0.15"
+ resolved "https://registry.npmjs.org/immer/-/immer-9.0.15.tgz#0b9169e5b1d22137aba7d43f8a81a495dd1b62dc"
+ integrity sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ==
+
+import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b"
+ integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==
+ dependencies:
+ parent-module "^1.0.0"
+ resolve-from "^4.0.0"
+
+import-lazy@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43"
+ integrity sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A==
+
+import-lazy@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/import-lazy/-/import-lazy-4.0.0.tgz#e8eb627483a0a43da3c03f3e35548be5cb0cc153"
+ integrity sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==
+
+imurmurhash@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
+ integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==
+
+indent-string@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
+ integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==
+
+infima@0.2.0-alpha.42:
+ version "0.2.0-alpha.42"
+ resolved "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.42.tgz#f6e86a655ad40877c6b4d11b2ede681eb5470aa5"
+ integrity sha512-ift8OXNbQQwtbIt6z16KnSWP7uJ/SysSMFI4F87MNRTicypfl4Pv3E2OGVv6N3nSZFJvA8imYulCBS64iyHYww==
+
+inflight@^1.0.4:
+ version "1.0.6"
+ resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
+ integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==
+ dependencies:
+ once "^1.3.0"
+ wrappy "1"
+
+inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3:
+ version "2.0.4"
+ resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
+ integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
+
+inherits@2.0.3:
+ version "2.0.3"
+ resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
+ integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==
+
+ini@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5"
+ integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==
+
+ini@^1.3.5, ini@~1.3.0:
+ version "1.3.8"
+ resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
+ integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
+
+inline-style-parser@0.1.1:
+ version "0.1.1"
+ resolved "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1"
+ integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==
+
+internal-slot@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c"
+ integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==
+ dependencies:
+ get-intrinsic "^1.1.0"
+ has "^1.0.3"
+ side-channel "^1.0.4"
+
+interpret@^1.0.0:
+ version "1.4.0"
+ resolved "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e"
+ integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==
+
+invariant@^2.2.4:
+ version "2.2.4"
+ resolved "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6"
+ integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==
+ dependencies:
+ loose-envify "^1.0.0"
+
+ipaddr.js@1.9.1:
+ version "1.9.1"
+ resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
+ integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
+
+ipaddr.js@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0"
+ integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng==
+
+is-alphabetical@1.0.4, is-alphabetical@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz#9e7d6b94916be22153745d184c298cbf986a686d"
+ integrity sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==
+
+is-alphanumerical@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz#7eb9a2431f855f6b1ef1a78e326df515696c4dbf"
+ integrity sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==
+ dependencies:
+ is-alphabetical "^1.0.0"
+ is-decimal "^1.0.0"
+
+is-arrayish@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
+ integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==
+
+is-bigint@^1.0.1:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3"
+ integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==
+ dependencies:
+ has-bigints "^1.0.1"
+
+is-binary-path@~2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
+ integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
+ dependencies:
+ binary-extensions "^2.0.0"
+
+is-boolean-object@^1.1.0:
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719"
+ integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==
+ dependencies:
+ call-bind "^1.0.2"
+ has-tostringtag "^1.0.0"
+
+is-buffer@^2.0.0:
+ version "2.0.5"
+ resolved "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191"
+ integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==
+
+is-callable@^1.1.4, is-callable@^1.2.4:
+ version "1.2.4"
+ resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945"
+ integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==
+
+is-ci@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c"
+ integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==
+ dependencies:
+ ci-info "^2.0.0"
+
+is-core-module@^2.5.0, is-core-module@^2.8.1, is-core-module@^2.9.0:
+ version "2.9.0"
+ resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69"
+ integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==
+ dependencies:
+ has "^1.0.3"
+
+is-date-object@^1.0.1:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f"
+ integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==
+ dependencies:
+ has-tostringtag "^1.0.0"
+
+is-decimal@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz#65a3a5958a1c5b63a706e1b333d7cd9f630d3fa5"
+ integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==
+
+is-docker@^2.0.0, is-docker@^2.1.1:
+ version "2.2.1"
+ resolved "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa"
+ integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==
+
+is-extendable@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
+ integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==
+
+is-extglob@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
+ integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==
+
+is-fullwidth-code-point@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
+ integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
+
+is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1:
+ version "4.0.3"
+ resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
+ integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==
+ dependencies:
+ is-extglob "^2.1.1"
+
+is-hexadecimal@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz#cc35c97588da4bd49a8eedd6bc4082d44dcb23a7"
+ integrity sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==
+
+is-installed-globally@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz#9a0fd407949c30f86eb6959ef1b7994ed0b7b520"
+ integrity sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==
+ dependencies:
+ global-dirs "^3.0.0"
+ is-path-inside "^3.0.2"
+
+is-negative-zero@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150"
+ integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==
+
+is-npm@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz#43e8d65cc56e1b67f8d47262cf667099193f45a8"
+ integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==
+
+is-number-object@^1.0.4:
+ version "1.0.7"
+ resolved "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc"
+ integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==
+ dependencies:
+ has-tostringtag "^1.0.0"
+
+is-number@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
+ integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
+
+is-obj@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f"
+ integrity sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==
+
+is-obj@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982"
+ integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==
+
+is-path-cwd@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb"
+ integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==
+
+is-path-inside@^3.0.2:
+ version "3.0.3"
+ resolved "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
+ integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
+
+is-plain-obj@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
+ integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==
+
+is-plain-obj@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287"
+ integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==
+
+is-plain-obj@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7"
+ integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==
+
+is-plain-object@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
+ integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==
+ dependencies:
+ isobject "^3.0.1"
+
+is-plain-object@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344"
+ integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==
+
+is-regex@^1.1.4:
+ version "1.1.4"
+ resolved "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958"
+ integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==
+ dependencies:
+ call-bind "^1.0.2"
+ has-tostringtag "^1.0.0"
+
+is-regexp@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069"
+ integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==
+
+is-regexp@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/is-regexp/-/is-regexp-2.1.0.tgz#cd734a56864e23b956bf4e7c66c396a4c0b22c2d"
+ integrity sha512-OZ4IlER3zmRIoB9AqNhEggVxqIH4ofDns5nRrPS6yQxXE1TPCUpFznBfRQmQa8uC+pXqjMnukiJBxCisIxiLGA==
+
+is-root@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c"
+ integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==
+
+is-shared-array-buffer@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79"
+ integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==
+ dependencies:
+ call-bind "^1.0.2"
+
+is-stream@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
+ integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
+
+is-string@^1.0.5, is-string@^1.0.7:
+ version "1.0.7"
+ resolved "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd"
+ integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==
+ dependencies:
+ has-tostringtag "^1.0.0"
+
+is-symbol@^1.0.2, is-symbol@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c"
+ integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==
+ dependencies:
+ has-symbols "^1.0.2"
+
+is-typedarray@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
+ integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==
+
+is-weakref@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2"
+ integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==
+ dependencies:
+ call-bind "^1.0.2"
+
+is-whitespace-character@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz#0858edd94a95594c7c9dd0b5c174ec6e45ee4aa7"
+ integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w==
+
+is-word-character@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz#ce0e73216f98599060592f62ff31354ddbeb0230"
+ integrity sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA==
+
+is-wsl@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271"
+ integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==
+ dependencies:
+ is-docker "^2.0.0"
+
+is-yarn-global@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz#d502d3382590ea3004893746754c89139973e232"
+ integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==
+
+isarray@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
+ integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==
+
+isarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
+ integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
+
+isexe@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
+ integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
+
+isobject@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
+ integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==
+
+jest-worker@^27.4.5, jest-worker@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0"
+ integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==
+ dependencies:
+ "@types/node" "*"
+ merge-stream "^2.0.0"
+ supports-color "^8.0.0"
+
+joi@^17.6.0:
+ version "17.6.0"
+ resolved "https://registry.npmjs.org/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2"
+ integrity sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw==
+ dependencies:
+ "@hapi/hoek" "^9.0.0"
+ "@hapi/topo" "^5.0.0"
+ "@sideway/address" "^4.1.3"
+ "@sideway/formula" "^3.0.0"
+ "@sideway/pinpoint" "^2.0.0"
+
+"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
+ integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
+
+js-yaml@^3.13.1:
+ version "3.14.1"
+ resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
+ integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^4.0.0"
+
+js-yaml@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
+ integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==
+ dependencies:
+ argparse "^2.0.1"
+
+jsesc@^2.5.1:
+ version "2.5.2"
+ resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4"
+ integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==
+
+jsesc@~0.5.0:
+ version "0.5.0"
+ resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
+ integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==
+
+json-buffer@3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898"
+ integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==
+
+json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1:
+ version "2.3.1"
+ resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
+ integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
+
+json-schema-traverse@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
+ integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
+
+json-schema-traverse@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2"
+ integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
+
+json-stable-stringify-without-jsonify@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
+ integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==
+
+json5@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe"
+ integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==
+ dependencies:
+ minimist "^1.2.0"
+
+json5@^2.1.2, json5@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c"
+ integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==
+
+jsonfile@^6.0.1:
+ version "6.1.0"
+ resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae"
+ integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==
+ dependencies:
+ universalify "^2.0.0"
+ optionalDependencies:
+ graceful-fs "^4.1.6"
+
+"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.1:
+ version "3.3.2"
+ resolved "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.2.tgz#afe5efe4332cd3515c065072bd4d6b0aa22152bd"
+ integrity sha512-4ZCADZHRkno244xlNnn4AOG6sRQ7iBZ5BbgZ4vW4y5IZw7cVUD1PPeblm1xx/nfmMxPdt/LHsXZW8z/j58+l9Q==
+ dependencies:
+ array-includes "^3.1.5"
+ object.assign "^4.1.2"
+
+keyv@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
+ integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==
+ dependencies:
+ json-buffer "3.0.0"
+
+kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd"
+ integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==
+
+kleur@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
+ integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==
+
+klona@^2.0.5:
+ version "2.0.5"
+ resolved "https://registry.npmjs.org/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc"
+ integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==
+
+known-css-properties@^0.25.0:
+ version "0.25.0"
+ resolved "https://registry.npmjs.org/known-css-properties/-/known-css-properties-0.25.0.tgz#6ebc4d4b412f602e5cfbeb4086bd544e34c0a776"
+ integrity sha512-b0/9J1O9Jcyik1GC6KC42hJ41jKwdO/Mq8Mdo5sYN+IuRTXs2YFHZC3kZSx6ueusqa95x3wLYe/ytKjbAfGixA==
+
+language-subtag-registry@~0.3.2:
+ version "0.3.22"
+ resolved "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d"
+ integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==
+
+language-tags@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a"
+ integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==
+ dependencies:
+ language-subtag-registry "~0.3.2"
+
+latest-version@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face"
+ integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==
+ dependencies:
+ package-json "^6.3.0"
+
+leven@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2"
+ integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==
+
+levn@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade"
+ integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==
+ dependencies:
+ prelude-ls "^1.2.1"
+ type-check "~0.4.0"
+
+lilconfig@^2.0.3:
+ version "2.0.6"
+ resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4"
+ integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==
+
+lines-and-columns@^1.1.6:
+ version "1.2.4"
+ resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632"
+ integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==
+
+loader-runner@^4.2.0:
+ version "4.3.0"
+ resolved "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1"
+ integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==
+
+loader-utils@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz#d6e3b4fb81870721ae4e0868ab11dd638368c129"
+ integrity sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==
+ dependencies:
+ big.js "^5.2.2"
+ emojis-list "^3.0.0"
+ json5 "^2.1.2"
+
+loader-utils@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f"
+ integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ==
+
+locate-path@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
+ integrity sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==
+ dependencies:
+ p-locate "^2.0.0"
+ path-exists "^3.0.0"
+
+locate-path@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
+ integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
+ dependencies:
+ p-locate "^3.0.0"
+ path-exists "^3.0.0"
+
+locate-path@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
+ integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
+ dependencies:
+ p-locate "^4.1.0"
+
+locate-path@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
+ integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==
+ dependencies:
+ p-locate "^5.0.0"
+
+lodash.curry@^4.0.1:
+ version "4.1.1"
+ resolved "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz#248e36072ede906501d75966200a86dab8b23170"
+ integrity sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA==
+
+lodash.debounce@^4.0.8:
+ version "4.0.8"
+ resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af"
+ integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==
+
+lodash.flow@^3.3.0:
+ version "3.5.0"
+ resolved "https://registry.npmjs.org/lodash.flow/-/lodash.flow-3.5.0.tgz#87bf40292b8cf83e4e8ce1a3ae4209e20071675a"
+ integrity sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw==
+
+lodash.memoize@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe"
+ integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==
+
+lodash.merge@^4.6.2:
+ version "4.6.2"
+ resolved "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a"
+ integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==
+
+lodash.truncate@^4.4.2:
+ version "4.4.2"
+ resolved "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193"
+ integrity sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==
+
+lodash.uniq@4.5.0, lodash.uniq@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
+ integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==
+
+lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21:
+ version "4.17.21"
+ resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
+ integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
+
+loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf"
+ integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==
+ dependencies:
+ js-tokens "^3.0.0 || ^4.0.0"
+
+lower-case@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28"
+ integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==
+ dependencies:
+ tslib "^2.0.3"
+
+lowercase-keys@^1.0.0, lowercase-keys@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"
+ integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==
+
+lowercase-keys@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
+ integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
+
+lru-cache@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
+ integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==
+ dependencies:
+ yallist "^4.0.0"
+
+make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f"
+ integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==
+ dependencies:
+ semver "^6.0.0"
+
+map-obj@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d"
+ integrity sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==
+
+map-obj@^4.0.0:
+ version "4.3.0"
+ resolved "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a"
+ integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==
+
+markdown-escapes@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz#c95415ef451499d7602b91095f3c8e8975f78535"
+ integrity sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg==
+
+mathml-tag-names@^2.1.3:
+ version "2.1.3"
+ resolved "https://registry.npmjs.org/mathml-tag-names/-/mathml-tag-names-2.1.3.tgz#4ddadd67308e780cf16a47685878ee27b736a0a3"
+ integrity sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg==
+
+mdast-squeeze-paragraphs@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-4.0.0.tgz#7c4c114679c3bee27ef10b58e2e015be79f1ef97"
+ integrity sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ==
+ dependencies:
+ unist-util-remove "^2.0.0"
+
+mdast-util-definitions@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz#c5c1a84db799173b4dcf7643cda999e440c24db2"
+ integrity sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==
+ dependencies:
+ unist-util-visit "^2.0.0"
+
+mdast-util-to-hast@10.0.1:
+ version "10.0.1"
+ resolved "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz#0cfc82089494c52d46eb0e3edb7a4eb2aea021eb"
+ integrity sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA==
+ dependencies:
+ "@types/mdast" "^3.0.0"
+ "@types/unist" "^2.0.0"
+ mdast-util-definitions "^4.0.0"
+ mdurl "^1.0.0"
+ unist-builder "^2.0.0"
+ unist-util-generated "^1.0.0"
+ unist-util-position "^3.0.0"
+ unist-util-visit "^2.0.0"
+
+mdast-util-to-string@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz#b8cfe6a713e1091cb5b728fc48885a4767f8b97b"
+ integrity sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==
+
+mdn-data@2.0.14:
+ version "2.0.14"
+ resolved "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50"
+ integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==
+
+mdurl@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e"
+ integrity sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==
+
+media-typer@0.3.0:
+ version "0.3.0"
+ resolved "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
+ integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==
+
+memfs@^3.1.2, memfs@^3.4.3:
+ version "3.4.7"
+ resolved "https://registry.npmjs.org/memfs/-/memfs-3.4.7.tgz#e5252ad2242a724f938cb937e3c4f7ceb1f70e5a"
+ integrity sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw==
+ dependencies:
+ fs-monkey "^1.0.3"
+
+meow@^9.0.0:
+ version "9.0.0"
+ resolved "https://registry.npmjs.org/meow/-/meow-9.0.0.tgz#cd9510bc5cac9dee7d03c73ee1f9ad959f4ea364"
+ integrity sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==
+ dependencies:
+ "@types/minimist" "^1.2.0"
+ camelcase-keys "^6.2.2"
+ decamelize "^1.2.0"
+ decamelize-keys "^1.1.0"
+ hard-rejection "^2.1.0"
+ minimist-options "4.1.0"
+ normalize-package-data "^3.0.0"
+ read-pkg-up "^7.0.1"
+ redent "^3.0.0"
+ trim-newlines "^3.0.0"
+ type-fest "^0.18.0"
+ yargs-parser "^20.2.3"
+
+merge-descriptors@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
+ integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==
+
+merge-stream@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60"
+ integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==
+
+merge2@^1.3.0, merge2@^1.4.1:
+ version "1.4.1"
+ resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
+ integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==
+
+methods@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
+ integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==
+
+micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5:
+ version "4.0.5"
+ resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6"
+ integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==
+ dependencies:
+ braces "^3.0.2"
+ picomatch "^2.3.1"
+
+mime-db@1.52.0, "mime-db@>= 1.43.0 < 2":
+ version "1.52.0"
+ resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
+ integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
+
+mime-db@~1.33.0:
+ version "1.33.0"
+ resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db"
+ integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==
+
+mime-types@2.1.18:
+ version "2.1.18"
+ resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz#6f323f60a83d11146f831ff11fd66e2fe5503bb8"
+ integrity sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==
+ dependencies:
+ mime-db "~1.33.0"
+
+mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34:
+ version "2.1.35"
+ resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
+ integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
+ dependencies:
+ mime-db "1.52.0"
+
+mime@1.6.0:
+ version "1.6.0"
+ resolved "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
+ integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
+
+mimic-fn@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
+ integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
+
+mimic-response@^1.0.0, mimic-response@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b"
+ integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==
+
+min-indent@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869"
+ integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==
+
+mini-create-react-context@^0.4.0:
+ version "0.4.1"
+ resolved "https://registry.npmjs.org/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz#072171561bfdc922da08a60c2197a497cc2d1d5e"
+ integrity sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ==
+ dependencies:
+ "@babel/runtime" "^7.12.1"
+ tiny-warning "^1.0.3"
+
+mini-css-extract-plugin@^2.6.1:
+ version "2.6.1"
+ resolved "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz#9a1251d15f2035c342d99a468ab9da7a0451b71e"
+ integrity sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg==
+ dependencies:
+ schema-utils "^4.0.0"
+
+minimalistic-assert@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7"
+ integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==
+
+minimatch@3.0.4:
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
+ integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
+ dependencies:
+ brace-expansion "^1.1.7"
+
+minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2:
+ version "3.1.2"
+ resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
+ integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
+ dependencies:
+ brace-expansion "^1.1.7"
+
+minimist-options@4.1.0:
+ version "4.1.0"
+ resolved "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619"
+ integrity sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==
+ dependencies:
+ arrify "^1.0.1"
+ is-plain-obj "^1.1.0"
+ kind-of "^6.0.3"
+
+minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6:
+ version "1.2.6"
+ resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
+ integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
+
+mrmime@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz#5f90c825fad4bdd41dc914eff5d1a8cfdaf24f27"
+ integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==
+
+ms@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
+ integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==
+
+ms@2.1.2:
+ version "2.1.2"
+ resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
+ integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
+
+ms@2.1.3, ms@^2.1.1:
+ version "2.1.3"
+ resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
+ integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
+
+multicast-dns@^7.2.5:
+ version "7.2.5"
+ resolved "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced"
+ integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==
+ dependencies:
+ dns-packet "^5.2.2"
+ thunky "^1.0.2"
+
+nanoid@^3.3.4:
+ version "3.3.4"
+ resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab"
+ integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==
+
+natural-compare@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
+ integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==
+
+negotiator@0.6.3:
+ version "0.6.3"
+ resolved "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd"
+ integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==
+
+neo-async@^2.6.2:
+ version "2.6.2"
+ resolved "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f"
+ integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==
+
+no-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d"
+ integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==
+ dependencies:
+ lower-case "^2.0.2"
+ tslib "^2.0.3"
+
+node-emoji@^1.10.0:
+ version "1.11.0"
+ resolved "https://registry.npmjs.org/node-emoji/-/node-emoji-1.11.0.tgz#69a0150e6946e2f115e9d7ea4df7971e2628301c"
+ integrity sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==
+ dependencies:
+ lodash "^4.17.21"
+
+node-fetch@2.6.7:
+ version "2.6.7"
+ resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
+ integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
+ dependencies:
+ whatwg-url "^5.0.0"
+
+node-forge@^1:
+ version "1.3.1"
+ resolved "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3"
+ integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==
+
+node-releases@^2.0.6:
+ version "2.0.6"
+ resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503"
+ integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==
+
+normalize-package-data@^2.5.0:
+ version "2.5.0"
+ resolved "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
+ integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==
+ dependencies:
+ hosted-git-info "^2.1.4"
+ resolve "^1.10.0"
+ semver "2 || 3 || 4 || 5"
+ validate-npm-package-license "^3.0.1"
+
+normalize-package-data@^3.0.0:
+ version "3.0.3"
+ resolved "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz#dbcc3e2da59509a0983422884cd172eefdfa525e"
+ integrity sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==
+ dependencies:
+ hosted-git-info "^4.0.1"
+ is-core-module "^2.5.0"
+ semver "^7.3.4"
+ validate-npm-package-license "^3.0.1"
+
+normalize-path@^3.0.0, normalize-path@~3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
+ integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
+
+normalize-range@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942"
+ integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==
+
+normalize-url@^4.1.0:
+ version "4.5.1"
+ resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a"
+ integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==
+
+normalize-url@^6.0.1:
+ version "6.1.0"
+ resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a"
+ integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==
+
+npm-run-path@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea"
+ integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==
+ dependencies:
+ path-key "^3.0.0"
+
+nprogress@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz#cb8f34c53213d895723fcbab907e9422adbcafb1"
+ integrity sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA==
+
+nth-check@^2.0.1:
+ version "2.1.1"
+ resolved "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d"
+ integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==
+ dependencies:
+ boolbase "^1.0.0"
+
+object-assign@^4.1.0, object-assign@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
+ integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
+
+object-inspect@^1.12.0, object-inspect@^1.9.0:
+ version "1.12.2"
+ resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea"
+ integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==
+
+object-keys@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
+ integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
+
+object.assign@^4.1.0, object.assign@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940"
+ integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+ has-symbols "^1.0.1"
+ object-keys "^1.1.1"
+
+object.entries@^1.1.5:
+ version "1.1.5"
+ resolved "https://registry.npmjs.org/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861"
+ integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.19.1"
+
+object.fromentries@^2.0.5:
+ version "2.0.5"
+ resolved "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251"
+ integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.19.1"
+
+object.hasown@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3"
+ integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==
+ dependencies:
+ define-properties "^1.1.4"
+ es-abstract "^1.19.5"
+
+object.values@^1.1.5:
+ version "1.1.5"
+ resolved "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac"
+ integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.19.1"
+
+obuf@^1.0.0, obuf@^1.1.2:
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e"
+ integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==
+
+on-finished@2.4.1:
+ version "2.4.1"
+ resolved "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f"
+ integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==
+ dependencies:
+ ee-first "1.1.1"
+
+on-headers@~1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f"
+ integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==
+
+once@^1.3.0, once@^1.3.1, once@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
+ integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==
+ dependencies:
+ wrappy "1"
+
+onetime@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e"
+ integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==
+ dependencies:
+ mimic-fn "^2.1.0"
+
+open@^8.0.9, open@^8.4.0:
+ version "8.4.0"
+ resolved "https://registry.npmjs.org/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8"
+ integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==
+ dependencies:
+ define-lazy-prop "^2.0.0"
+ is-docker "^2.1.1"
+ is-wsl "^2.2.0"
+
+opener@^1.5.2:
+ version "1.5.2"
+ resolved "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598"
+ integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==
+
+optionator@^0.9.1:
+ version "0.9.1"
+ resolved "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499"
+ integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==
+ dependencies:
+ deep-is "^0.1.3"
+ fast-levenshtein "^2.0.6"
+ levn "^0.4.1"
+ prelude-ls "^1.2.1"
+ type-check "^0.4.0"
+ word-wrap "^1.2.3"
+
+p-cancelable@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc"
+ integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==
+
+p-limit@^1.1.0:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8"
+ integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==
+ dependencies:
+ p-try "^1.0.0"
+
+p-limit@^2.0.0, p-limit@^2.2.0:
+ version "2.3.0"
+ resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
+ integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
+ dependencies:
+ p-try "^2.0.0"
+
+p-limit@^3.0.2:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b"
+ integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==
+ dependencies:
+ yocto-queue "^0.1.0"
+
+p-locate@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
+ integrity sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==
+ dependencies:
+ p-limit "^1.1.0"
+
+p-locate@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
+ integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
+ dependencies:
+ p-limit "^2.0.0"
+
+p-locate@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
+ integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
+ dependencies:
+ p-limit "^2.2.0"
+
+p-locate@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834"
+ integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==
+ dependencies:
+ p-limit "^3.0.2"
+
+p-map@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b"
+ integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==
+ dependencies:
+ aggregate-error "^3.0.0"
+
+p-retry@^4.5.0:
+ version "4.6.2"
+ resolved "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16"
+ integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==
+ dependencies:
+ "@types/retry" "0.12.0"
+ retry "^0.13.1"
+
+p-try@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3"
+ integrity sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==
+
+p-try@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
+ integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
+
+package-json@^6.3.0:
+ version "6.5.0"
+ resolved "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0"
+ integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==
+ dependencies:
+ got "^9.6.0"
+ registry-auth-token "^4.0.0"
+ registry-url "^5.0.0"
+ semver "^6.2.0"
+
+param-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5"
+ integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==
+ dependencies:
+ dot-case "^3.0.4"
+ tslib "^2.0.3"
+
+parent-module@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2"
+ integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==
+ dependencies:
+ callsites "^3.0.0"
+
+parse-entities@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz#53c6eb5b9314a1f4ec99fa0fdf7ce01ecda0cbe8"
+ integrity sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==
+ dependencies:
+ character-entities "^1.0.0"
+ character-entities-legacy "^1.0.0"
+ character-reference-invalid "^1.0.0"
+ is-alphanumerical "^1.0.0"
+ is-decimal "^1.0.0"
+ is-hexadecimal "^1.0.0"
+
+parse-json@^5.0.0:
+ version "5.2.0"
+ resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd"
+ integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==
+ dependencies:
+ "@babel/code-frame" "^7.0.0"
+ error-ex "^1.3.1"
+ json-parse-even-better-errors "^2.3.0"
+ lines-and-columns "^1.1.6"
+
+parse-numeric-range@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz#7c63b61190d61e4d53a1197f0c83c47bb670ffa3"
+ integrity sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==
+
+parse5-htmlparser2-tree-adapter@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz#23c2cc233bcf09bb7beba8b8a69d46b08c62c2f1"
+ integrity sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==
+ dependencies:
+ domhandler "^5.0.2"
+ parse5 "^7.0.0"
+
+parse5@^6.0.0:
+ version "6.0.1"
+ resolved "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b"
+ integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==
+
+parse5@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.npmjs.org/parse5/-/parse5-7.0.0.tgz#51f74a5257f5fcc536389e8c2d0b3802e1bfa91a"
+ integrity sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g==
+ dependencies:
+ entities "^4.3.0"
+
+parseurl@~1.3.2, parseurl@~1.3.3:
+ version "1.3.3"
+ resolved "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
+ integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
+
+pascal-case@^3.1.2:
+ version "3.1.2"
+ resolved "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb"
+ integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+
+path-exists@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
+ integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==
+
+path-exists@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
+ integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
+
+path-is-absolute@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
+ integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==
+
+path-is-inside@1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53"
+ integrity sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==
+
+path-key@^3.0.0, path-key@^3.1.0:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
+ integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
+
+path-parse@^1.0.7:
+ version "1.0.7"
+ resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
+ integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
+
+path-to-regexp@0.1.7:
+ version "0.1.7"
+ resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
+ integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==
+
+path-to-regexp@2.2.1:
+ version "2.2.1"
+ resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz#90b617025a16381a879bc82a38d4e8bdeb2bcf45"
+ integrity sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ==
+
+path-to-regexp@^1.7.0:
+ version "1.8.0"
+ resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a"
+ integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==
+ dependencies:
+ isarray "0.0.1"
+
+path-type@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b"
+ integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==
+
+picocolors@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
+ integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==
+
+picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1:
+ version "2.3.1"
+ resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
+ integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
+
+pkg-dir@^4.1.0:
+ version "4.2.0"
+ resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3"
+ integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==
+ dependencies:
+ find-up "^4.0.0"
+
+pkg-up@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5"
+ integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==
+ dependencies:
+ find-up "^3.0.0"
+
+postcss-calc@^8.2.3:
+ version "8.2.4"
+ resolved "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5"
+ integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==
+ dependencies:
+ postcss-selector-parser "^6.0.9"
+ postcss-value-parser "^4.2.0"
+
+postcss-colormin@^5.3.0:
+ version "5.3.0"
+ resolved "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a"
+ integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg==
+ dependencies:
+ browserslist "^4.16.6"
+ caniuse-api "^3.0.0"
+ colord "^2.9.1"
+ postcss-value-parser "^4.2.0"
+
+postcss-convert-values@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz#31586df4e184c2e8890e8b34a0b9355313f503ab"
+ integrity sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g==
+ dependencies:
+ browserslist "^4.20.3"
+ postcss-value-parser "^4.2.0"
+
+postcss-discard-comments@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696"
+ integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ==
+
+postcss-discard-duplicates@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848"
+ integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw==
+
+postcss-discard-empty@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c"
+ integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A==
+
+postcss-discard-overridden@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e"
+ integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw==
+
+postcss-discard-unused@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-5.1.0.tgz#8974e9b143d887677304e558c1166d3762501142"
+ integrity sha512-KwLWymI9hbwXmJa0dkrzpRbSJEh0vVUd7r8t0yOGPcfKzyJJxFM8kLyC5Ev9avji6nY95pOp1W6HqIrfT+0VGw==
+ dependencies:
+ postcss-selector-parser "^6.0.5"
+
+postcss-loader@^7.0.0:
+ version "7.0.1"
+ resolved "https://registry.npmjs.org/postcss-loader/-/postcss-loader-7.0.1.tgz#4c883cc0a1b2bfe2074377b7a74c1cd805684395"
+ integrity sha512-VRviFEyYlLjctSM93gAZtcJJ/iSkPZ79zWbN/1fSH+NisBByEiVLqpdVDrPLVSi8DX0oJo12kL/GppTBdKVXiQ==
+ dependencies:
+ cosmiconfig "^7.0.0"
+ klona "^2.0.5"
+ semver "^7.3.7"
+
+postcss-media-query-parser@^0.2.3:
+ version "0.2.3"
+ resolved "https://registry.npmjs.org/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz#27b39c6f4d94f81b1a73b8f76351c609e5cef244"
+ integrity sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig==
+
+postcss-merge-idents@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-5.1.1.tgz#7753817c2e0b75d0853b56f78a89771e15ca04a1"
+ integrity sha512-pCijL1TREiCoog5nQp7wUe+TUonA2tC2sQ54UGeMmryK3UFGIYKqDyjnqd6RcuI4znFn9hWSLNN8xKE/vWcUQw==
+ dependencies:
+ cssnano-utils "^3.1.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-merge-longhand@^5.1.6:
+ version "5.1.6"
+ resolved "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz#f378a8a7e55766b7b644f48e5d8c789ed7ed51ce"
+ integrity sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+ stylehacks "^5.1.0"
+
+postcss-merge-rules@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz#7049a14d4211045412116d79b751def4484473a5"
+ integrity sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ==
+ dependencies:
+ browserslist "^4.16.6"
+ caniuse-api "^3.0.0"
+ cssnano-utils "^3.1.0"
+ postcss-selector-parser "^6.0.5"
+
+postcss-minify-font-values@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b"
+ integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-minify-gradients@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c"
+ integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw==
+ dependencies:
+ colord "^2.9.1"
+ cssnano-utils "^3.1.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-minify-params@^5.1.3:
+ version "5.1.3"
+ resolved "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz#ac41a6465be2db735099bbd1798d85079a6dc1f9"
+ integrity sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg==
+ dependencies:
+ browserslist "^4.16.6"
+ cssnano-utils "^3.1.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-minify-selectors@^5.2.1:
+ version "5.2.1"
+ resolved "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6"
+ integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg==
+ dependencies:
+ postcss-selector-parser "^6.0.5"
+
+postcss-modules-extract-imports@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d"
+ integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw==
+
+postcss-modules-local-by-default@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c"
+ integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ==
+ dependencies:
+ icss-utils "^5.0.0"
+ postcss-selector-parser "^6.0.2"
+ postcss-value-parser "^4.1.0"
+
+postcss-modules-scope@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06"
+ integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg==
+ dependencies:
+ postcss-selector-parser "^6.0.4"
+
+postcss-modules-values@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c"
+ integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==
+ dependencies:
+ icss-utils "^5.0.0"
+
+postcss-normalize-charset@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed"
+ integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg==
+
+postcss-normalize-display-values@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8"
+ integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-positions@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92"
+ integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-repeat-style@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2"
+ integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-string@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228"
+ integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-timing-functions@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb"
+ integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-unicode@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75"
+ integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ==
+ dependencies:
+ browserslist "^4.16.6"
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-url@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc"
+ integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew==
+ dependencies:
+ normalize-url "^6.0.1"
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-whitespace@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa"
+ integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-ordered-values@^5.1.3:
+ version "5.1.3"
+ resolved "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38"
+ integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ==
+ dependencies:
+ cssnano-utils "^3.1.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-reduce-idents@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-5.2.0.tgz#c89c11336c432ac4b28792f24778859a67dfba95"
+ integrity sha512-BTrLjICoSB6gxbc58D5mdBK8OhXRDqud/zodYfdSi52qvDHdMwk+9kB9xsM8yJThH/sZU5A6QVSmMmaN001gIg==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-reduce-initial@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6"
+ integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw==
+ dependencies:
+ browserslist "^4.16.6"
+ caniuse-api "^3.0.0"
+
+postcss-reduce-transforms@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9"
+ integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-resolve-nested-selector@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.npmjs.org/postcss-resolve-nested-selector/-/postcss-resolve-nested-selector-0.1.1.tgz#29ccbc7c37dedfac304e9fff0bf1596b3f6a0e4e"
+ integrity sha512-HvExULSwLqHLgUy1rl3ANIqCsvMS0WHss2UOsXhXnQaZ9VCc2oBvIpXrl00IUFT5ZDITME0o6oiXeiHr2SAIfw==
+
+postcss-safe-parser@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-6.0.0.tgz#bb4c29894171a94bc5c996b9a30317ef402adaa1"
+ integrity sha512-FARHN8pwH+WiS2OPCxJI8FuRJpTVnn6ZNFiqAM2aeW2LwTHWWmWgIyKC6cUo0L8aeKiF/14MNvnpls6R2PBeMQ==
+
+postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.9:
+ version "6.0.10"
+ resolved "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d"
+ integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==
+ dependencies:
+ cssesc "^3.0.0"
+ util-deprecate "^1.0.2"
+
+postcss-sort-media-queries@^4.2.1:
+ version "4.2.1"
+ resolved "https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-4.2.1.tgz#a99bae69ef1098ee3b64a5fa94d258ec240d0355"
+ integrity sha512-9VYekQalFZ3sdgcTjXMa0dDjsfBVHXlraYJEMiOJ/2iMmI2JGCMavP16z3kWOaRu8NSaJCTgVpB/IVpH5yT9YQ==
+ dependencies:
+ sort-css-media-queries "2.0.4"
+
+postcss-svgo@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d"
+ integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+ svgo "^2.7.0"
+
+postcss-unique-selectors@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6"
+ integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA==
+ dependencies:
+ postcss-selector-parser "^6.0.5"
+
+postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514"
+ integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==
+
+postcss-zindex@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-5.1.0.tgz#4a5c7e5ff1050bd4c01d95b1847dfdcc58a496ff"
+ integrity sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A==
+
+postcss@^8.3.11, postcss@^8.4.13, postcss@^8.4.14, postcss@^8.4.7:
+ version "8.4.14"
+ resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz#ee9274d5622b4858c1007a74d76e42e56fd21caf"
+ integrity sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==
+ dependencies:
+ nanoid "^3.3.4"
+ picocolors "^1.0.0"
+ source-map-js "^1.0.2"
+
+prelude-ls@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
+ integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==
+
+prepend-http@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897"
+ integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==
+
+prettier@^2.7.1:
+ version "2.7.1"
+ resolved "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64"
+ integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==
+
+pretty-error@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6"
+ integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==
+ dependencies:
+ lodash "^4.17.20"
+ renderkid "^3.0.0"
+
+pretty-time@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz#ffb7429afabb8535c346a34e41873adf3d74dd0e"
+ integrity sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==
+
+prism-react-renderer@^1.2.1, prism-react-renderer@^1.3.5:
+ version "1.3.5"
+ resolved "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz#786bb69aa6f73c32ba1ee813fbe17a0115435085"
+ integrity sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg==
+
+prismjs@^1.28.0:
+ version "1.28.0"
+ resolved "https://registry.npmjs.org/prismjs/-/prismjs-1.28.0.tgz#0d8f561fa0f7cf6ebca901747828b149147044b6"
+ integrity sha512-8aaXdYvl1F7iC7Xm1spqSaY/OJBpYW3v+KJ+F17iYxvdc8sfjW194COK5wVhMZX45tGteiBQgdvD/nhxcRwylw==
+
+process-nextick-args@~2.0.0:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
+ integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
+
+promise@^7.1.1:
+ version "7.3.1"
+ resolved "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf"
+ integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==
+ dependencies:
+ asap "~2.0.3"
+
+prompts@^2.4.2:
+ version "2.4.2"
+ resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069"
+ integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==
+ dependencies:
+ kleur "^3.0.3"
+ sisteransi "^1.0.5"
+
+prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1:
+ version "15.8.1"
+ resolved "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5"
+ integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==
+ dependencies:
+ loose-envify "^1.4.0"
+ object-assign "^4.1.1"
+ react-is "^16.13.1"
+
+property-information@^5.0.0, property-information@^5.3.0:
+ version "5.6.0"
+ resolved "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz#61675545fb23002f245c6540ec46077d4da3ed69"
+ integrity sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==
+ dependencies:
+ xtend "^4.0.0"
+
+proxy-addr@~2.0.7:
+ version "2.0.7"
+ resolved "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025"
+ integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==
+ dependencies:
+ forwarded "0.2.0"
+ ipaddr.js "1.9.1"
+
+pump@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
+ integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
+ dependencies:
+ end-of-stream "^1.1.0"
+ once "^1.3.1"
+
+punycode@^1.3.2:
+ version "1.4.1"
+ resolved "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
+ integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==
+
+punycode@^2.1.0:
+ version "2.1.1"
+ resolved "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
+ integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
+
+pupa@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62"
+ integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==
+ dependencies:
+ escape-goat "^2.0.0"
+
+pure-color@^1.2.0:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/pure-color/-/pure-color-1.3.0.tgz#1fe064fb0ac851f0de61320a8bf796836422f33e"
+ integrity sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA==
+
+qs@6.10.3:
+ version "6.10.3"
+ resolved "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e"
+ integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==
+ dependencies:
+ side-channel "^1.0.4"
+
+queue-microtask@^1.2.2:
+ version "1.2.3"
+ resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
+ integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
+
+queue@6.0.2:
+ version "6.0.2"
+ resolved "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz#b91525283e2315c7553d2efa18d83e76432fed65"
+ integrity sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA==
+ dependencies:
+ inherits "~2.0.3"
+
+quick-lru@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f"
+ integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==
+
+randombytes@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a"
+ integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==
+ dependencies:
+ safe-buffer "^5.1.0"
+
+range-parser@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e"
+ integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A==
+
+range-parser@^1.2.1, range-parser@~1.2.1:
+ version "1.2.1"
+ resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
+ integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
+
+raw-body@2.5.1:
+ version "2.5.1"
+ resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857"
+ integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==
+ dependencies:
+ bytes "3.1.2"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ unpipe "1.0.0"
+
+rc@1.2.8, rc@^1.2.8:
+ version "1.2.8"
+ resolved "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed"
+ integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==
+ dependencies:
+ deep-extend "^0.6.0"
+ ini "~1.3.0"
+ minimist "^1.2.0"
+ strip-json-comments "~2.0.1"
+
+react-base16-styling@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.6.0.tgz#ef2156d66cf4139695c8a167886cb69ea660792c"
+ integrity sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ==
+ dependencies:
+ base16 "^1.0.0"
+ lodash.curry "^4.0.1"
+ lodash.flow "^3.3.0"
+ pure-color "^1.2.0"
+
+react-dev-utils@^12.0.1:
+ version "12.0.1"
+ resolved "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73"
+ integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ==
+ dependencies:
+ "@babel/code-frame" "^7.16.0"
+ address "^1.1.2"
+ browserslist "^4.18.1"
+ chalk "^4.1.2"
+ cross-spawn "^7.0.3"
+ detect-port-alt "^1.1.6"
+ escape-string-regexp "^4.0.0"
+ filesize "^8.0.6"
+ find-up "^5.0.0"
+ fork-ts-checker-webpack-plugin "^6.5.0"
+ global-modules "^2.0.0"
+ globby "^11.0.4"
+ gzip-size "^6.0.0"
+ immer "^9.0.7"
+ is-root "^2.1.0"
+ loader-utils "^3.2.0"
+ open "^8.4.0"
+ pkg-up "^3.1.0"
+ prompts "^2.4.2"
+ react-error-overlay "^6.0.11"
+ recursive-readdir "^2.2.2"
+ shell-quote "^1.7.3"
+ strip-ansi "^6.0.1"
+ text-table "^0.2.0"
+
+react-dom@^17.0.2:
+ version "17.0.2"
+ resolved "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz#ecffb6845e3ad8dbfcdc498f0d0a939736502c23"
+ integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==
+ dependencies:
+ loose-envify "^1.1.0"
+ object-assign "^4.1.1"
+ scheduler "^0.20.2"
+
+react-error-overlay@^6.0.11:
+ version "6.0.11"
+ resolved "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb"
+ integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg==
+
+react-fast-compare@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.0.tgz#641a9da81b6a6320f270e89724fb45a0b39e43bb"
+ integrity sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA==
+
+react-helmet-async@*, react-helmet-async@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/react-helmet-async/-/react-helmet-async-1.3.0.tgz#7bd5bf8c5c69ea9f02f6083f14ce33ef545c222e"
+ integrity sha512-9jZ57/dAn9t3q6hneQS0wukqC2ENOBgMNVEhb/ZG9ZSxUetzVIw4iAmEU38IaVg3QGYauQPhSeUTuIUtFglWpg==
+ dependencies:
+ "@babel/runtime" "^7.12.5"
+ invariant "^2.2.4"
+ prop-types "^15.7.2"
+ react-fast-compare "^3.2.0"
+ shallowequal "^1.1.0"
+
+react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0:
+ version "16.13.1"
+ resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
+ integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
+
+react-json-view@^1.21.3:
+ version "1.21.3"
+ resolved "https://registry.npmjs.org/react-json-view/-/react-json-view-1.21.3.tgz#f184209ee8f1bf374fb0c41b0813cff54549c475"
+ integrity sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw==
+ dependencies:
+ flux "^4.0.1"
+ react-base16-styling "^0.6.0"
+ react-lifecycles-compat "^3.0.4"
+ react-textarea-autosize "^8.3.2"
+
+react-lifecycles-compat@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz#4f1a273afdfc8f3488a8c516bfda78f872352362"
+ integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==
+
+react-loadable-ssr-addon-v5-slorber@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz#2cdc91e8a744ffdf9e3556caabeb6e4278689883"
+ integrity sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A==
+ dependencies:
+ "@babel/runtime" "^7.10.3"
+
+react-router-config@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.npmjs.org/react-router-config/-/react-router-config-5.1.1.tgz#0f4263d1a80c6b2dc7b9c1902c9526478194a988"
+ integrity sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==
+ dependencies:
+ "@babel/runtime" "^7.1.2"
+
+react-router-dom@^5.3.3:
+ version "5.3.3"
+ resolved "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.3.tgz#8779fc28e6691d07afcaf98406d3812fe6f11199"
+ integrity sha512-Ov0tGPMBgqmbu5CDmN++tv2HQ9HlWDuWIIqn4b88gjlAN5IHI+4ZUZRcpz9Hl0azFIwihbLDYw1OiHGRo7ZIng==
+ dependencies:
+ "@babel/runtime" "^7.12.13"
+ history "^4.9.0"
+ loose-envify "^1.3.1"
+ prop-types "^15.6.2"
+ react-router "5.3.3"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+
+react-router@5.3.3, react-router@^5.3.3:
+ version "5.3.3"
+ resolved "https://registry.npmjs.org/react-router/-/react-router-5.3.3.tgz#8e3841f4089e728cf82a429d92cdcaa5e4a3a288"
+ integrity sha512-mzQGUvS3bM84TnbtMYR8ZjKnuPJ71IjSzR+DE6UkUqvN4czWIqEs17yLL8xkAycv4ev0AiN+IGrWu88vJs/p2w==
+ dependencies:
+ "@babel/runtime" "^7.12.13"
+ history "^4.9.0"
+ hoist-non-react-statics "^3.1.0"
+ loose-envify "^1.3.1"
+ mini-create-react-context "^0.4.0"
+ path-to-regexp "^1.7.0"
+ prop-types "^15.6.2"
+ react-is "^16.6.0"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+
+react-textarea-autosize@^8.3.2:
+ version "8.3.4"
+ resolved "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.3.4.tgz#270a343de7ad350534141b02c9cb78903e553524"
+ integrity sha512-CdtmP8Dc19xL8/R6sWvtknD/eCXkQr30dtvC4VmGInhRsfF8X/ihXCq6+9l9qbxmKRiq407/7z5fxE7cVWQNgQ==
+ dependencies:
+ "@babel/runtime" "^7.10.2"
+ use-composed-ref "^1.3.0"
+ use-latest "^1.2.1"
+
+react@^17.0.2:
+ version "17.0.2"
+ resolved "https://registry.npmjs.org/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037"
+ integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==
+ dependencies:
+ loose-envify "^1.1.0"
+ object-assign "^4.1.1"
+
+read-pkg-up@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507"
+ integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==
+ dependencies:
+ find-up "^4.1.0"
+ read-pkg "^5.2.0"
+ type-fest "^0.8.1"
+
+read-pkg@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc"
+ integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==
+ dependencies:
+ "@types/normalize-package-data" "^2.4.0"
+ normalize-package-data "^2.5.0"
+ parse-json "^5.0.0"
+ type-fest "^0.6.0"
+
+readable-stream@^2.0.1:
+ version "2.3.7"
+ resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
+ integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.3"
+ isarray "~1.0.0"
+ process-nextick-args "~2.0.0"
+ safe-buffer "~5.1.1"
+ string_decoder "~1.1.1"
+ util-deprecate "~1.0.1"
+
+readable-stream@^3.0.6:
+ version "3.6.0"
+ resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
+ integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
+ dependencies:
+ inherits "^2.0.3"
+ string_decoder "^1.1.1"
+ util-deprecate "^1.0.1"
+
+readdirp@~3.6.0:
+ version "3.6.0"
+ resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7"
+ integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==
+ dependencies:
+ picomatch "^2.2.1"
+
+reading-time@^1.5.0:
+ version "1.5.0"
+ resolved "https://registry.npmjs.org/reading-time/-/reading-time-1.5.0.tgz#d2a7f1b6057cb2e169beaf87113cc3411b5bc5bb"
+ integrity sha512-onYyVhBNr4CmAxFsKS7bz+uTLRakypIe4R+5A824vBSkQy/hB3fZepoVEf8OVAxzLvK+H/jm9TzpI3ETSm64Kg==
+
+rechoir@^0.6.2:
+ version "0.6.2"
+ resolved "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384"
+ integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==
+ dependencies:
+ resolve "^1.1.6"
+
+recursive-readdir@^2.2.2:
+ version "2.2.2"
+ resolved "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f"
+ integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==
+ dependencies:
+ minimatch "3.0.4"
+
+redent@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f"
+ integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==
+ dependencies:
+ indent-string "^4.0.0"
+ strip-indent "^3.0.0"
+
+regenerate-unicode-properties@^10.0.1:
+ version "10.0.1"
+ resolved "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz#7f442732aa7934a3740c779bb9b3340dccc1fb56"
+ integrity sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw==
+ dependencies:
+ regenerate "^1.4.2"
+
+regenerate@^1.4.2:
+ version "1.4.2"
+ resolved "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a"
+ integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==
+
+regenerator-runtime@^0.13.4:
+ version "0.13.9"
+ resolved "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
+ integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==
+
+regenerator-transform@^0.15.0:
+ version "0.15.0"
+ resolved "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537"
+ integrity sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==
+ dependencies:
+ "@babel/runtime" "^7.8.4"
+
+regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3:
+ version "1.4.3"
+ resolved "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac"
+ integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ functions-have-names "^1.2.2"
+
+regexpp@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2"
+ integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==
+
+regexpu-core@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.1.0.tgz#2f8504c3fd0ebe11215783a41541e21c79942c6d"
+ integrity sha512-bb6hk+xWd2PEOkj5It46A16zFMs2mv86Iwpdu94la4S3sJ7C973h2dHpYKwIBGaWSO7cIRJ+UX0IeMaWcO4qwA==
+ dependencies:
+ regenerate "^1.4.2"
+ regenerate-unicode-properties "^10.0.1"
+ regjsgen "^0.6.0"
+ regjsparser "^0.8.2"
+ unicode-match-property-ecmascript "^2.0.0"
+ unicode-match-property-value-ecmascript "^2.0.0"
+
+registry-auth-token@^4.0.0:
+ version "4.2.2"
+ resolved "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.2.tgz#f02d49c3668884612ca031419491a13539e21fac"
+ integrity sha512-PC5ZysNb42zpFME6D/XlIgtNGdTl8bBOCw90xQLVMpzuuubJKYDWFAEuUNc+Cn8Z8724tg2SDhDRrkVEsqfDMg==
+ dependencies:
+ rc "1.2.8"
+
+registry-url@^5.0.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009"
+ integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==
+ dependencies:
+ rc "^1.2.8"
+
+regjsgen@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.npmjs.org/regjsgen/-/regjsgen-0.6.0.tgz#83414c5354afd7d6627b16af5f10f41c4e71808d"
+ integrity sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA==
+
+regjsparser@^0.8.2:
+ version "0.8.4"
+ resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.8.4.tgz#8a14285ffcc5de78c5b95d62bbf413b6bc132d5f"
+ integrity sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA==
+ dependencies:
+ jsesc "~0.5.0"
+
+relateurl@^0.2.7:
+ version "0.2.7"
+ resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9"
+ integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==
+
+remark-emoji@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz#1c702090a1525da5b80e15a8f963ef2c8236cac7"
+ integrity sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w==
+ dependencies:
+ emoticon "^3.2.0"
+ node-emoji "^1.10.0"
+ unist-util-visit "^2.0.3"
+
+remark-footnotes@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-2.0.0.tgz#9001c4c2ffebba55695d2dd80ffb8b82f7e6303f"
+ integrity sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ==
+
+remark-mdx@1.6.22:
+ version "1.6.22"
+ resolved "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz#06a8dab07dcfdd57f3373af7f86bd0e992108bbd"
+ integrity sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ==
+ dependencies:
+ "@babel/core" "7.12.9"
+ "@babel/helper-plugin-utils" "7.10.4"
+ "@babel/plugin-proposal-object-rest-spread" "7.12.1"
+ "@babel/plugin-syntax-jsx" "7.12.1"
+ "@mdx-js/util" "1.6.22"
+ is-alphabetical "1.0.4"
+ remark-parse "8.0.3"
+ unified "9.2.0"
+
+remark-parse@8.0.3:
+ version "8.0.3"
+ resolved "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz#9c62aa3b35b79a486454c690472906075f40c7e1"
+ integrity sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q==
+ dependencies:
+ ccount "^1.0.0"
+ collapse-white-space "^1.0.2"
+ is-alphabetical "^1.0.0"
+ is-decimal "^1.0.0"
+ is-whitespace-character "^1.0.0"
+ is-word-character "^1.0.0"
+ markdown-escapes "^1.0.0"
+ parse-entities "^2.0.0"
+ repeat-string "^1.5.4"
+ state-toggle "^1.0.0"
+ trim "0.0.1"
+ trim-trailing-lines "^1.0.0"
+ unherit "^1.0.4"
+ unist-util-remove-position "^2.0.0"
+ vfile-location "^3.0.0"
+ xtend "^4.0.1"
+
+remark-squeeze-paragraphs@4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz#76eb0e085295131c84748c8e43810159c5653ead"
+ integrity sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw==
+ dependencies:
+ mdast-squeeze-paragraphs "^4.0.0"
+
+renderkid@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a"
+ integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==
+ dependencies:
+ css-select "^4.1.3"
+ dom-converter "^0.2.0"
+ htmlparser2 "^6.1.0"
+ lodash "^4.17.21"
+ strip-ansi "^6.0.1"
+
+repeat-string@^1.5.4:
+ version "1.6.1"
+ resolved "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637"
+ integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==
+
+require-from-string@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909"
+ integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==
+
+"require-like@>= 0.1.1":
+ version "0.1.2"
+ resolved "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz#ad6f30c13becd797010c468afa775c0c0a6b47fa"
+ integrity sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==
+
+requires-port@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
+ integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==
+
+resolve-from@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
+ integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
+
+resolve-from@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
+ integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==
+
+resolve-pathname@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd"
+ integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==
+
+resolve@^1.1.6, resolve@^1.10.0, resolve@^1.14.2, resolve@^1.20.0, resolve@^1.22.0, resolve@^1.3.2:
+ version "1.22.1"
+ resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177"
+ integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==
+ dependencies:
+ is-core-module "^2.9.0"
+ path-parse "^1.0.7"
+ supports-preserve-symlinks-flag "^1.0.0"
+
+resolve@^2.0.0-next.3:
+ version "2.0.0-next.4"
+ resolved "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660"
+ integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==
+ dependencies:
+ is-core-module "^2.9.0"
+ path-parse "^1.0.7"
+ supports-preserve-symlinks-flag "^1.0.0"
+
+responselike@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7"
+ integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==
+ dependencies:
+ lowercase-keys "^1.0.0"
+
+retry@^0.13.1:
+ version "0.13.1"
+ resolved "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658"
+ integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==
+
+reusify@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
+ integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
+
+rimraf@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
+ integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==
+ dependencies:
+ glob "^7.1.3"
+
+rtl-detect@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.0.4.tgz#40ae0ea7302a150b96bc75af7d749607392ecac6"
+ integrity sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ==
+
+rtlcss@^3.5.0:
+ version "3.5.0"
+ resolved "https://registry.npmjs.org/rtlcss/-/rtlcss-3.5.0.tgz#c9eb91269827a102bac7ae3115dd5d049de636c3"
+ integrity sha512-wzgMaMFHQTnyi9YOwsx9LjOxYXJPzS8sYnFaKm6R5ysvTkwzHiB0vxnbHwchHQT65PTdBjDG21/kQBWI7q9O7A==
+ dependencies:
+ find-up "^5.0.0"
+ picocolors "^1.0.0"
+ postcss "^8.3.11"
+ strip-json-comments "^3.1.1"
+
+run-parallel@^1.1.9:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
+ integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==
+ dependencies:
+ queue-microtask "^1.2.2"
+
+rxjs@^7.5.4:
+ version "7.5.6"
+ resolved "https://registry.npmjs.org/rxjs/-/rxjs-7.5.6.tgz#0446577557862afd6903517ce7cae79ecb9662bc"
+ integrity sha512-dnyv2/YsXhnm461G+R/Pe5bWP41Nm6LBXEYWI6eiFP4fiwx6WRI/CD0zbdVAudd9xwLEF2IDcKXLHit0FYjUzw==
+ dependencies:
+ tslib "^2.1.0"
+
+safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+ version "5.1.2"
+ resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
+ integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
+
+safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0:
+ version "5.2.1"
+ resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
+ integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
+
+"safer-buffer@>= 2.1.2 < 3":
+ version "2.1.2"
+ resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
+ integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
+
+sax@^1.2.4:
+ version "1.2.4"
+ resolved "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
+ integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
+
+scheduler@^0.20.2:
+ version "0.20.2"
+ resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91"
+ integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==
+ dependencies:
+ loose-envify "^1.1.0"
+ object-assign "^4.1.1"
+
+schema-utils@2.7.0:
+ version "2.7.0"
+ resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7"
+ integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A==
+ dependencies:
+ "@types/json-schema" "^7.0.4"
+ ajv "^6.12.2"
+ ajv-keywords "^3.4.1"
+
+schema-utils@^2.6.5:
+ version "2.7.1"
+ resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7"
+ integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==
+ dependencies:
+ "@types/json-schema" "^7.0.5"
+ ajv "^6.12.4"
+ ajv-keywords "^3.5.2"
+
+schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281"
+ integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==
+ dependencies:
+ "@types/json-schema" "^7.0.8"
+ ajv "^6.12.5"
+ ajv-keywords "^3.5.2"
+
+schema-utils@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7"
+ integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==
+ dependencies:
+ "@types/json-schema" "^7.0.9"
+ ajv "^8.8.0"
+ ajv-formats "^2.1.1"
+ ajv-keywords "^5.0.0"
+
+section-matter@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz#e9041953506780ec01d59f292a19c7b850b84167"
+ integrity sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==
+ dependencies:
+ extend-shallow "^2.0.1"
+ kind-of "^6.0.0"
+
+select-hose@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca"
+ integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==
+
+selfsigned@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/selfsigned/-/selfsigned-2.0.1.tgz#8b2df7fa56bf014d19b6007655fff209c0ef0a56"
+ integrity sha512-LmME957M1zOsUhG+67rAjKfiWFox3SBxE/yymatMZsAx+oMrJ0YQ8AToOnyCm7xbeg2ep37IHLxdu0o2MavQOQ==
+ dependencies:
+ node-forge "^1"
+
+semver-diff@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b"
+ integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==
+ dependencies:
+ semver "^6.3.0"
+
+"semver@2 || 3 || 4 || 5", semver@^5.4.1:
+ version "5.7.1"
+ resolved "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
+ integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
+
+semver@7.0.0:
+ version "7.0.0"
+ resolved "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
+ integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
+
+semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0:
+ version "6.3.0"
+ resolved "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
+ integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
+
+semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7:
+ version "7.3.7"
+ resolved "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f"
+ integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==
+ dependencies:
+ lru-cache "^6.0.0"
+
+send@0.18.0:
+ version "0.18.0"
+ resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be"
+ integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==
+ dependencies:
+ debug "2.6.9"
+ depd "2.0.0"
+ destroy "1.2.0"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ fresh "0.5.2"
+ http-errors "2.0.0"
+ mime "1.6.0"
+ ms "2.1.3"
+ on-finished "2.4.1"
+ range-parser "~1.2.1"
+ statuses "2.0.1"
+
+serialize-javascript@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8"
+ integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==
+ dependencies:
+ randombytes "^2.1.0"
+
+serve-handler@^6.1.3:
+ version "6.1.3"
+ resolved "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz#1bf8c5ae138712af55c758477533b9117f6435e8"
+ integrity sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==
+ dependencies:
+ bytes "3.0.0"
+ content-disposition "0.5.2"
+ fast-url-parser "1.1.3"
+ mime-types "2.1.18"
+ minimatch "3.0.4"
+ path-is-inside "1.0.2"
+ path-to-regexp "2.2.1"
+ range-parser "1.2.0"
+
+serve-index@^1.9.1:
+ version "1.9.1"
+ resolved "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239"
+ integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==
+ dependencies:
+ accepts "~1.3.4"
+ batch "0.6.1"
+ debug "2.6.9"
+ escape-html "~1.0.3"
+ http-errors "~1.6.2"
+ mime-types "~2.1.17"
+ parseurl "~1.3.2"
+
+serve-static@1.15.0:
+ version "1.15.0"
+ resolved "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540"
+ integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==
+ dependencies:
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ parseurl "~1.3.3"
+ send "0.18.0"
+
+setimmediate@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
+ integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==
+
+setprototypeof@1.1.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656"
+ integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==
+
+setprototypeof@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
+ integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
+
+shallow-clone@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3"
+ integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==
+ dependencies:
+ kind-of "^6.0.2"
+
+shallowequal@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8"
+ integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==
+
+shebang-command@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
+ integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
+ dependencies:
+ shebang-regex "^3.0.0"
+
+shebang-regex@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
+ integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
+
+shell-quote@^1.7.3:
+ version "1.7.3"
+ resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123"
+ integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw==
+
+shelljs@^0.8.5:
+ version "0.8.5"
+ resolved "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c"
+ integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==
+ dependencies:
+ glob "^7.0.0"
+ interpret "^1.0.0"
+ rechoir "^0.6.2"
+
+side-channel@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf"
+ integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==
+ dependencies:
+ call-bind "^1.0.0"
+ get-intrinsic "^1.0.2"
+ object-inspect "^1.9.0"
+
+signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7:
+ version "3.0.7"
+ resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
+ integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
+
+sirv@^1.0.7:
+ version "1.0.19"
+ resolved "https://registry.npmjs.org/sirv/-/sirv-1.0.19.tgz#1d73979b38c7fe91fcba49c85280daa9c2363b49"
+ integrity sha512-JuLThK3TnZG1TAKDwNIqNq6QA2afLOCcm+iE8D1Kj3GA40pSPsxQjjJl0J8X3tsR7T+CP1GavpzLwYkgVLWrZQ==
+ dependencies:
+ "@polka/url" "^1.0.0-next.20"
+ mrmime "^1.0.0"
+ totalist "^1.0.0"
+
+sisteransi@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed"
+ integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==
+
+sitemap@^7.1.1:
+ version "7.1.1"
+ resolved "https://registry.npmjs.org/sitemap/-/sitemap-7.1.1.tgz#eeed9ad6d95499161a3eadc60f8c6dce4bea2bef"
+ integrity sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==
+ dependencies:
+ "@types/node" "^17.0.5"
+ "@types/sax" "^1.2.1"
+ arg "^5.0.0"
+ sax "^1.2.4"
+
+slash@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
+ integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
+
+slash@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7"
+ integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==
+
+slice-ansi@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b"
+ integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==
+ dependencies:
+ ansi-styles "^4.0.0"
+ astral-regex "^2.0.0"
+ is-fullwidth-code-point "^3.0.0"
+
+sockjs@^0.3.24:
+ version "0.3.24"
+ resolved "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce"
+ integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==
+ dependencies:
+ faye-websocket "^0.11.3"
+ uuid "^8.3.2"
+ websocket-driver "^0.7.4"
+
+sort-css-media-queries@2.0.4:
+ version "2.0.4"
+ resolved "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.0.4.tgz#b2badfa519cb4a938acbc6d3aaa913d4949dc908"
+ integrity sha512-PAIsEK/XupCQwitjv7XxoMvYhT7EAfyzI3hsy/MyDgTvc+Ft55ctdkctJLOy6cQejaIC+zjpUL4djFVm2ivOOw==
+
+source-map-js@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c"
+ integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==
+
+source-map-support@~0.5.20:
+ version "0.5.21"
+ resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
+ integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==
+ dependencies:
+ buffer-from "^1.0.0"
+ source-map "^0.6.0"
+
+source-map@^0.5.0:
+ version "0.5.7"
+ resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
+ integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==
+
+source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0:
+ version "0.6.1"
+ resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
+ integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
+
+space-separated-tokens@^1.0.0:
+ version "1.1.5"
+ resolved "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz#85f32c3d10d9682007e917414ddc5c26d1aa6899"
+ integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==
+
+spdx-correct@^3.0.0:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9"
+ integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==
+ dependencies:
+ spdx-expression-parse "^3.0.0"
+ spdx-license-ids "^3.0.0"
+
+spdx-exceptions@^2.1.0:
+ version "2.3.0"
+ resolved "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d"
+ integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==
+
+spdx-expression-parse@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679"
+ integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==
+ dependencies:
+ spdx-exceptions "^2.1.0"
+ spdx-license-ids "^3.0.0"
+
+spdx-license-ids@^3.0.0:
+ version "3.0.11"
+ resolved "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.11.tgz#50c0d8c40a14ec1bf449bae69a0ea4685a9d9f95"
+ integrity sha512-Ctl2BrFiM0X3MANYgj3CkygxhRmr9mi6xhejbdO960nF6EDJApTYpn0BQnDKlnNBULKiCN1n3w9EBkHK8ZWg+g==
+
+spdy-transport@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31"
+ integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==
+ dependencies:
+ debug "^4.1.0"
+ detect-node "^2.0.4"
+ hpack.js "^2.1.6"
+ obuf "^1.1.2"
+ readable-stream "^3.0.6"
+ wbuf "^1.7.3"
+
+spdy@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b"
+ integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==
+ dependencies:
+ debug "^4.1.0"
+ handle-thing "^2.0.0"
+ http-deceiver "^1.2.7"
+ select-hose "^2.0.0"
+ spdy-transport "^3.0.0"
+
+sprintf-js@~1.0.2:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
+ integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
+
+stable@^0.1.8:
+ version "0.1.8"
+ resolved "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf"
+ integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==
+
+state-toggle@^1.0.0:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz#e123b16a88e143139b09c6852221bc9815917dfe"
+ integrity sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==
+
+statuses@2.0.1:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
+ integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==
+
+"statuses@>= 1.4.0 < 2":
+ version "1.5.0"
+ resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c"
+ integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==
+
+std-env@^3.0.1:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/std-env/-/std-env-3.1.1.tgz#1f19c4d3f6278c52efd08a94574a2a8d32b7d092"
+ integrity sha512-/c645XdExBypL01TpFKiG/3RAa/Qmu+zRi0MwAmrdEkwHNuN0ebo8ccAXBBDa5Z0QOJgBskUIbuCK91x0sCVEw==
+
+string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3:
+ version "4.2.3"
+ resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
+ integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
+ dependencies:
+ emoji-regex "^8.0.0"
+ is-fullwidth-code-point "^3.0.0"
+ strip-ansi "^6.0.1"
+
+string-width@^5.0.1:
+ version "5.1.2"
+ resolved "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794"
+ integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==
+ dependencies:
+ eastasianwidth "^0.2.0"
+ emoji-regex "^9.2.2"
+ strip-ansi "^7.0.1"
+
+string.prototype.matchall@^4.0.7:
+ version "4.0.7"
+ resolved "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d"
+ integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.19.1"
+ get-intrinsic "^1.1.1"
+ has-symbols "^1.0.3"
+ internal-slot "^1.0.3"
+ regexp.prototype.flags "^1.4.1"
+ side-channel "^1.0.4"
+
+string.prototype.trimend@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0"
+ integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.4"
+ es-abstract "^1.19.5"
+
+string.prototype.trimstart@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef"
+ integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.4"
+ es-abstract "^1.19.5"
+
+string_decoder@^1.1.1:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
+ integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
+ dependencies:
+ safe-buffer "~5.2.0"
+
+string_decoder@~1.1.1:
+ version "1.1.1"
+ resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
+ integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
+ dependencies:
+ safe-buffer "~5.1.0"
+
+stringify-object@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629"
+ integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==
+ dependencies:
+ get-own-enumerable-property-symbols "^3.0.0"
+ is-obj "^1.0.1"
+ is-regexp "^1.0.0"
+
+strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
+ integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
+ dependencies:
+ ansi-regex "^5.0.1"
+
+strip-ansi@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2"
+ integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==
+ dependencies:
+ ansi-regex "^6.0.1"
+
+strip-bom-string@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92"
+ integrity sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==
+
+strip-bom@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
+ integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==
+
+strip-final-newline@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad"
+ integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==
+
+strip-indent@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001"
+ integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==
+ dependencies:
+ min-indent "^1.0.0"
+
+strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
+ integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+
+strip-json-comments@~2.0.1:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
+ integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==
+
+style-search@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.npmjs.org/style-search/-/style-search-0.1.0.tgz#7958c793e47e32e07d2b5cafe5c0bf8e12e77902"
+ integrity sha512-Dj1Okke1C3uKKwQcetra4jSuk0DqbzbYtXipzFlFMZtowbF1x7BKJwB9AayVMyFARvU8EDrZdcax4At/452cAg==
+
+style-to-object@0.3.0, style-to-object@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz#b1b790d205991cc783801967214979ee19a76e46"
+ integrity sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==
+ dependencies:
+ inline-style-parser "0.1.1"
+
+stylehacks@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520"
+ integrity sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q==
+ dependencies:
+ browserslist "^4.16.6"
+ postcss-selector-parser "^6.0.4"
+
+stylelint@^14.9.1:
+ version "14.9.1"
+ resolved "https://registry.npmjs.org/stylelint/-/stylelint-14.9.1.tgz#6494ed38f148b1e75b402d678a3b6a8aae86dfda"
+ integrity sha512-RdAkJdPiLqHawCSnu21nE27MjNXaVd4WcOHA4vK5GtIGjScfhNnaOuWR2wWdfKFAvcWQPOYe311iveiVKSmwsA==
+ dependencies:
+ "@csstools/selector-specificity" "^2.0.1"
+ balanced-match "^2.0.0"
+ colord "^2.9.2"
+ cosmiconfig "^7.0.1"
+ css-functions-list "^3.1.0"
+ debug "^4.3.4"
+ execall "^2.0.0"
+ fast-glob "^3.2.11"
+ fastest-levenshtein "^1.0.12"
+ file-entry-cache "^6.0.1"
+ get-stdin "^8.0.0"
+ global-modules "^2.0.0"
+ globby "^11.1.0"
+ globjoin "^0.1.4"
+ html-tags "^3.2.0"
+ ignore "^5.2.0"
+ import-lazy "^4.0.0"
+ imurmurhash "^0.1.4"
+ is-plain-object "^5.0.0"
+ known-css-properties "^0.25.0"
+ mathml-tag-names "^2.1.3"
+ meow "^9.0.0"
+ micromatch "^4.0.5"
+ normalize-path "^3.0.0"
+ picocolors "^1.0.0"
+ postcss "^8.4.14"
+ postcss-media-query-parser "^0.2.3"
+ postcss-resolve-nested-selector "^0.1.1"
+ postcss-safe-parser "^6.0.0"
+ postcss-selector-parser "^6.0.10"
+ postcss-value-parser "^4.2.0"
+ resolve-from "^5.0.0"
+ string-width "^4.2.3"
+ strip-ansi "^6.0.1"
+ style-search "^0.1.0"
+ supports-hyperlinks "^2.2.0"
+ svg-tags "^1.0.0"
+ table "^6.8.0"
+ v8-compile-cache "^2.3.0"
+ write-file-atomic "^4.0.1"
+
+supports-color@^5.3.0:
+ version "5.5.0"
+ resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
+ integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
+ dependencies:
+ has-flag "^3.0.0"
+
+supports-color@^7.0.0, supports-color@^7.1.0:
+ version "7.2.0"
+ resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
+ integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
+ dependencies:
+ has-flag "^4.0.0"
+
+supports-color@^8.0.0:
+ version "8.1.1"
+ resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c"
+ integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==
+ dependencies:
+ has-flag "^4.0.0"
+
+supports-hyperlinks@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb"
+ integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ==
+ dependencies:
+ has-flag "^4.0.0"
+ supports-color "^7.0.0"
+
+supports-preserve-symlinks-flag@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
+ integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
+
+svg-parser@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5"
+ integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==
+
+svg-tags@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764"
+ integrity sha512-ovssysQTa+luh7A5Weu3Rta6FJlFBBbInjOh722LIt6klpU2/HtdUbszju/G4devcvk8PGt7FCLv5wftu3THUA==
+
+svgo@^2.7.0, svgo@^2.8.0:
+ version "2.8.0"
+ resolved "https://registry.npmjs.org/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24"
+ integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg==
+ dependencies:
+ "@trysound/sax" "0.2.0"
+ commander "^7.2.0"
+ css-select "^4.1.3"
+ css-tree "^1.1.3"
+ csso "^4.2.0"
+ picocolors "^1.0.0"
+ stable "^0.1.8"
+
+table@^6.8.0:
+ version "6.8.0"
+ resolved "https://registry.npmjs.org/table/-/table-6.8.0.tgz#87e28f14fa4321c3377ba286f07b79b281a3b3ca"
+ integrity sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==
+ dependencies:
+ ajv "^8.0.1"
+ lodash.truncate "^4.4.2"
+ slice-ansi "^4.0.0"
+ string-width "^4.2.3"
+ strip-ansi "^6.0.1"
+
+tapable@^1.0.0:
+ version "1.1.3"
+ resolved "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2"
+ integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==
+
+tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0:
+ version "2.2.1"
+ resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0"
+ integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==
+
+terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.3.3:
+ version "5.3.3"
+ resolved "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.3.tgz#8033db876dd5875487213e87c627bca323e5ed90"
+ integrity sha512-Fx60G5HNYknNTNQnzQ1VePRuu89ZVYWfjRAeT5rITuCY/1b08s49e5kSQwHDirKZWuoKOBRFS98EUUoZ9kLEwQ==
+ dependencies:
+ "@jridgewell/trace-mapping" "^0.3.7"
+ jest-worker "^27.4.5"
+ schema-utils "^3.1.1"
+ serialize-javascript "^6.0.0"
+ terser "^5.7.2"
+
+terser@^5.10.0, terser@^5.7.2:
+ version "5.14.2"
+ resolved "https://registry.npmjs.org/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10"
+ integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==
+ dependencies:
+ "@jridgewell/source-map" "^0.3.2"
+ acorn "^8.5.0"
+ commander "^2.20.0"
+ source-map-support "~0.5.20"
+
+text-table@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
+ integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==
+
+thunky@^1.0.2:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d"
+ integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==
+
+tiny-invariant@^1.0.2:
+ version "1.2.0"
+ resolved "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.2.0.tgz#a1141f86b672a9148c72e978a19a73b9b94a15a9"
+ integrity sha512-1Uhn/aqw5C6RI4KejVeTg6mIS7IqxnLJ8Mv2tV5rTc0qWobay7pDUz6Wi392Cnc8ak1H0F2cjoRzb2/AW4+Fvg==
+
+tiny-warning@^1.0.0, tiny-warning@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754"
+ integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==
+
+to-fast-properties@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
+ integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==
+
+to-readable-stream@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771"
+ integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==
+
+to-regex-range@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
+ integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
+ dependencies:
+ is-number "^7.0.0"
+
+toidentifier@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
+ integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==
+
+totalist@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz#a4d65a3e546517701e3e5c37a47a70ac97fe56df"
+ integrity sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g==
+
+tr46@~0.0.3:
+ version "0.0.3"
+ resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
+ integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==
+
+trim-newlines@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144"
+ integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==
+
+trim-trailing-lines@^1.0.0:
+ version "1.1.4"
+ resolved "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz#bd4abbec7cc880462f10b2c8b5ce1d8d1ec7c2c0"
+ integrity sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ==
+
+trim@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz#5858547f6b290757ee95cccc666fb50084c460dd"
+ integrity sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ==
+
+trough@^1.0.0:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz#b8b639cefad7d0bb2abd37d433ff8293efa5f406"
+ integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==
+
+tsconfig-paths@^3.14.1:
+ version "3.14.1"
+ resolved "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a"
+ integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==
+ dependencies:
+ "@types/json5" "^0.0.29"
+ json5 "^1.0.1"
+ minimist "^1.2.6"
+ strip-bom "^3.0.0"
+
+tslib@^2.0.3, tslib@^2.1.0, tslib@^2.4.0:
+ version "2.4.0"
+ resolved "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"
+ integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==
+
+type-check@^0.4.0, type-check@~0.4.0:
+ version "0.4.0"
+ resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1"
+ integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==
+ dependencies:
+ prelude-ls "^1.2.1"
+
+type-fest@^0.18.0:
+ version "0.18.1"
+ resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz#db4bc151a4a2cf4eebf9add5db75508db6cc841f"
+ integrity sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==
+
+type-fest@^0.20.2:
+ version "0.20.2"
+ resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
+ integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
+
+type-fest@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b"
+ integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==
+
+type-fest@^0.8.1:
+ version "0.8.1"
+ resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
+ integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
+
+type-fest@^2.5.0:
+ version "2.17.0"
+ resolved "https://registry.npmjs.org/type-fest/-/type-fest-2.17.0.tgz#c677030ce61e5be0c90c077d52571eb73c506ea9"
+ integrity sha512-U+g3/JVXnOki1kLSc+xZGPRll3Ah9u2VIG6Sn9iH9YX6UkPERmt6O/0fIyTgsd2/whV0+gAaHAg8fz6sG1QzMA==
+
+type-is@~1.6.18:
+ version "1.6.18"
+ resolved "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
+ integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
+ dependencies:
+ media-typer "0.3.0"
+ mime-types "~2.1.24"
+
+typedarray-to-buffer@^3.1.5:
+ version "3.1.5"
+ resolved "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
+ integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
+ dependencies:
+ is-typedarray "^1.0.0"
+
+ua-parser-js@^0.7.30:
+ version "0.7.31"
+ resolved "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.31.tgz#649a656b191dffab4f21d5e053e27ca17cbff5c6"
+ integrity sha512-qLK/Xe9E2uzmYI3qLeOmI0tEOt+TBBQyUIAh4aAgU05FVYzeZrKUdkAZfBNVGRaHVgV0TDkdEngJSw/SyQchkQ==
+
+unbox-primitive@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e"
+ integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==
+ dependencies:
+ call-bind "^1.0.2"
+ has-bigints "^1.0.2"
+ has-symbols "^1.0.3"
+ which-boxed-primitive "^1.0.2"
+
+unherit@^1.0.4:
+ version "1.1.3"
+ resolved "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz#6c9b503f2b41b262330c80e91c8614abdaa69c22"
+ integrity sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==
+ dependencies:
+ inherits "^2.0.0"
+ xtend "^4.0.0"
+
+unicode-canonical-property-names-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc"
+ integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==
+
+unicode-match-property-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3"
+ integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==
+ dependencies:
+ unicode-canonical-property-names-ecmascript "^2.0.0"
+ unicode-property-aliases-ecmascript "^2.0.0"
+
+unicode-match-property-value-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714"
+ integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==
+
+unicode-property-aliases-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz#0a36cb9a585c4f6abd51ad1deddb285c165297c8"
+ integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==
+
+unified@9.2.0:
+ version "9.2.0"
+ resolved "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz#67a62c627c40589edebbf60f53edfd4d822027f8"
+ integrity sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==
+ dependencies:
+ bail "^1.0.0"
+ extend "^3.0.0"
+ is-buffer "^2.0.0"
+ is-plain-obj "^2.0.0"
+ trough "^1.0.0"
+ vfile "^4.0.0"
+
+unified@^9.2.2:
+ version "9.2.2"
+ resolved "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz#67649a1abfc3ab85d2969502902775eb03146975"
+ integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ==
+ dependencies:
+ bail "^1.0.0"
+ extend "^3.0.0"
+ is-buffer "^2.0.0"
+ is-plain-obj "^2.0.0"
+ trough "^1.0.0"
+ vfile "^4.0.0"
+
+unique-string@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d"
+ integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==
+ dependencies:
+ crypto-random-string "^2.0.0"
+
+unist-builder@2.0.3, unist-builder@^2.0.0:
+ version "2.0.3"
+ resolved "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz#77648711b5d86af0942f334397a33c5e91516436"
+ integrity sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==
+
+unist-util-generated@^1.0.0:
+ version "1.1.6"
+ resolved "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz#5ab51f689e2992a472beb1b35f2ce7ff2f324d4b"
+ integrity sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==
+
+unist-util-is@^4.0.0:
+ version "4.1.0"
+ resolved "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz#976e5f462a7a5de73d94b706bac1b90671b57797"
+ integrity sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==
+
+unist-util-position@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz#1c42ee6301f8d52f47d14f62bbdb796571fa2d47"
+ integrity sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==
+
+unist-util-remove-position@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz#5d19ca79fdba712301999b2b73553ca8f3b352cc"
+ integrity sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA==
+ dependencies:
+ unist-util-visit "^2.0.0"
+
+unist-util-remove@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz#b0b4738aa7ee445c402fda9328d604a02d010588"
+ integrity sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q==
+ dependencies:
+ unist-util-is "^4.0.0"
+
+unist-util-stringify-position@^2.0.0:
+ version "2.0.3"
+ resolved "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz#cce3bfa1cdf85ba7375d1d5b17bdc4cada9bd9da"
+ integrity sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==
+ dependencies:
+ "@types/unist" "^2.0.2"
+
+unist-util-visit-parents@^3.0.0:
+ version "3.1.1"
+ resolved "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz#65a6ce698f78a6b0f56aa0e88f13801886cdaef6"
+ integrity sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==
+ dependencies:
+ "@types/unist" "^2.0.0"
+ unist-util-is "^4.0.0"
+
+unist-util-visit@2.0.3, unist-util-visit@^2.0.0, unist-util-visit@^2.0.3:
+ version "2.0.3"
+ resolved "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz#c3703893146df47203bb8a9795af47d7b971208c"
+ integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==
+ dependencies:
+ "@types/unist" "^2.0.0"
+ unist-util-is "^4.0.0"
+ unist-util-visit-parents "^3.0.0"
+
+universalify@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717"
+ integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==
+
+unpipe@1.0.0, unpipe@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
+ integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==
+
+update-browserslist-db@^1.0.4:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.5.tgz#be06a5eedd62f107b7c19eb5bcefb194411abf38"
+ integrity sha512-dteFFpCyvuDdr9S/ff1ISkKt/9YZxKjI9WlRR99c180GaztJtRa/fn18FdxGVKVsnPY7/a/FDN68mcvUmP4U7Q==
+ dependencies:
+ escalade "^3.1.1"
+ picocolors "^1.0.0"
+
+update-notifier@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz#4ab0d7c7f36a231dd7316cf7729313f0214d9ad9"
+ integrity sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==
+ dependencies:
+ boxen "^5.0.0"
+ chalk "^4.1.0"
+ configstore "^5.0.1"
+ has-yarn "^2.1.0"
+ import-lazy "^2.1.0"
+ is-ci "^2.0.0"
+ is-installed-globally "^0.4.0"
+ is-npm "^5.0.0"
+ is-yarn-global "^0.3.0"
+ latest-version "^5.1.0"
+ pupa "^2.1.1"
+ semver "^7.3.4"
+ semver-diff "^3.1.1"
+ xdg-basedir "^4.0.0"
+
+uri-js@^4.2.2:
+ version "4.4.1"
+ resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
+ integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==
+ dependencies:
+ punycode "^2.1.0"
+
+url-loader@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz#28505e905cae158cf07c92ca622d7f237e70a4e2"
+ integrity sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==
+ dependencies:
+ loader-utils "^2.0.0"
+ mime-types "^2.1.27"
+ schema-utils "^3.0.0"
+
+url-parse-lax@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c"
+ integrity sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ==
+ dependencies:
+ prepend-http "^2.0.0"
+
+use-composed-ref@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.3.0.tgz#3d8104db34b7b264030a9d916c5e94fbe280dbda"
+ integrity sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ==
+
+use-isomorphic-layout-effect@^1.1.1:
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz#497cefb13d863d687b08477d9e5a164ad8c1a6fb"
+ integrity sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==
+
+use-latest@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.npmjs.org/use-latest/-/use-latest-1.2.1.tgz#d13dfb4b08c28e3e33991546a2cee53e14038cf2"
+ integrity sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw==
+ dependencies:
+ use-isomorphic-layout-effect "^1.1.1"
+
+util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
+ integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
+
+utila@~0.4:
+ version "0.4.0"
+ resolved "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c"
+ integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==
+
+utility-types@^3.10.0:
+ version "3.10.0"
+ resolved "https://registry.npmjs.org/utility-types/-/utility-types-3.10.0.tgz#ea4148f9a741015f05ed74fd615e1d20e6bed82b"
+ integrity sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==
+
+utils-merge@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
+ integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==
+
+uuid@^8.3.2:
+ version "8.3.2"
+ resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
+ integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
+
+v8-compile-cache@^2.0.3, v8-compile-cache@^2.3.0:
+ version "2.3.0"
+ resolved "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee"
+ integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==
+
+validate-npm-package-license@^3.0.1:
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a"
+ integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==
+ dependencies:
+ spdx-correct "^3.0.0"
+ spdx-expression-parse "^3.0.0"
+
+value-equal@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c"
+ integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==
+
+vary@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
+ integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==
+
+vfile-location@^3.0.0, vfile-location@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz#d8e41fbcbd406063669ebf6c33d56ae8721d0f3c"
+ integrity sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==
+
+vfile-message@^2.0.0:
+ version "2.0.4"
+ resolved "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz#5b43b88171d409eae58477d13f23dd41d52c371a"
+ integrity sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==
+ dependencies:
+ "@types/unist" "^2.0.0"
+ unist-util-stringify-position "^2.0.0"
+
+vfile@^4.0.0:
+ version "4.2.1"
+ resolved "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz#03f1dce28fc625c625bc6514350fbdb00fa9e624"
+ integrity sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==
+ dependencies:
+ "@types/unist" "^2.0.0"
+ is-buffer "^2.0.0"
+ unist-util-stringify-position "^2.0.0"
+ vfile-message "^2.0.0"
+
+wait-on@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.npmjs.org/wait-on/-/wait-on-6.0.1.tgz#16bbc4d1e4ebdd41c5b4e63a2e16dbd1f4e5601e"
+ integrity sha512-zht+KASY3usTY5u2LgaNqn/Cd8MukxLGjdcZxT2ns5QzDmTFc4XoWBgC+C/na+sMRZTuVygQoMYwdcVjHnYIVw==
+ dependencies:
+ axios "^0.25.0"
+ joi "^17.6.0"
+ lodash "^4.17.21"
+ minimist "^1.2.5"
+ rxjs "^7.5.4"
+
+watchpack@^2.3.1:
+ version "2.4.0"
+ resolved "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d"
+ integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==
+ dependencies:
+ glob-to-regexp "^0.4.1"
+ graceful-fs "^4.1.2"
+
+wbuf@^1.1.0, wbuf@^1.7.3:
+ version "1.7.3"
+ resolved "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df"
+ integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==
+ dependencies:
+ minimalistic-assert "^1.0.0"
+
+web-namespaces@^1.0.0:
+ version "1.1.4"
+ resolved "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz#bc98a3de60dadd7faefc403d1076d529f5e030ec"
+ integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==
+
+webidl-conversions@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
+ integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
+
+webpack-bundle-analyzer@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.5.0.tgz#1b0eea2947e73528754a6f9af3e91b2b6e0f79d5"
+ integrity sha512-GUMZlM3SKwS8Z+CKeIFx7CVoHn3dXFcUAjT/dcZQQmfSZGvitPfMob2ipjai7ovFFqPvTqkEZ/leL4O0YOdAYQ==
+ dependencies:
+ acorn "^8.0.4"
+ acorn-walk "^8.0.0"
+ chalk "^4.1.0"
+ commander "^7.2.0"
+ gzip-size "^6.0.0"
+ lodash "^4.17.20"
+ opener "^1.5.2"
+ sirv "^1.0.7"
+ ws "^7.3.1"
+
+webpack-dev-middleware@^5.3.1:
+ version "5.3.3"
+ resolved "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f"
+ integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==
+ dependencies:
+ colorette "^2.0.10"
+ memfs "^3.4.3"
+ mime-types "^2.1.31"
+ range-parser "^1.2.1"
+ schema-utils "^4.0.0"
+
+webpack-dev-server@^4.9.3:
+ version "4.9.3"
+ resolved "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.9.3.tgz#2360a5d6d532acb5410a668417ad549ee3b8a3c9"
+ integrity sha512-3qp/eoboZG5/6QgiZ3llN8TUzkSpYg1Ko9khWX1h40MIEUNS2mDoIa8aXsPfskER+GbTvs/IJZ1QTBBhhuetSw==
+ dependencies:
+ "@types/bonjour" "^3.5.9"
+ "@types/connect-history-api-fallback" "^1.3.5"
+ "@types/express" "^4.17.13"
+ "@types/serve-index" "^1.9.1"
+ "@types/serve-static" "^1.13.10"
+ "@types/sockjs" "^0.3.33"
+ "@types/ws" "^8.5.1"
+ ansi-html-community "^0.0.8"
+ bonjour-service "^1.0.11"
+ chokidar "^3.5.3"
+ colorette "^2.0.10"
+ compression "^1.7.4"
+ connect-history-api-fallback "^2.0.0"
+ default-gateway "^6.0.3"
+ express "^4.17.3"
+ graceful-fs "^4.2.6"
+ html-entities "^2.3.2"
+ http-proxy-middleware "^2.0.3"
+ ipaddr.js "^2.0.1"
+ open "^8.0.9"
+ p-retry "^4.5.0"
+ rimraf "^3.0.2"
+ schema-utils "^4.0.0"
+ selfsigned "^2.0.1"
+ serve-index "^1.9.1"
+ sockjs "^0.3.24"
+ spdy "^4.0.2"
+ webpack-dev-middleware "^5.3.1"
+ ws "^8.4.2"
+
+webpack-merge@^5.8.0:
+ version "5.8.0"
+ resolved "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61"
+ integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==
+ dependencies:
+ clone-deep "^4.0.1"
+ wildcard "^2.0.0"
+
+webpack-sources@^3.2.2, webpack-sources@^3.2.3:
+ version "3.2.3"
+ resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde"
+ integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==
+
+webpack@^5.73.0:
+ version "5.73.0"
+ resolved "https://registry.npmjs.org/webpack/-/webpack-5.73.0.tgz#bbd17738f8a53ee5760ea2f59dce7f3431d35d38"
+ integrity sha512-svjudQRPPa0YiOYa2lM/Gacw0r6PvxptHj4FuEKQ2kX05ZLkjbVc5MnPs6its5j7IZljnIqSVo/OsY2X0IpHGA==
+ dependencies:
+ "@types/eslint-scope" "^3.7.3"
+ "@types/estree" "^0.0.51"
+ "@webassemblyjs/ast" "1.11.1"
+ "@webassemblyjs/wasm-edit" "1.11.1"
+ "@webassemblyjs/wasm-parser" "1.11.1"
+ acorn "^8.4.1"
+ acorn-import-assertions "^1.7.6"
+ browserslist "^4.14.5"
+ chrome-trace-event "^1.0.2"
+ enhanced-resolve "^5.9.3"
+ es-module-lexer "^0.9.0"
+ eslint-scope "5.1.1"
+ events "^3.2.0"
+ glob-to-regexp "^0.4.1"
+ graceful-fs "^4.2.9"
+ json-parse-even-better-errors "^2.3.1"
+ loader-runner "^4.2.0"
+ mime-types "^2.1.27"
+ neo-async "^2.6.2"
+ schema-utils "^3.1.0"
+ tapable "^2.1.1"
+ terser-webpack-plugin "^5.1.3"
+ watchpack "^2.3.1"
+ webpack-sources "^3.2.3"
+
+webpackbar@^5.0.2:
+ version "5.0.2"
+ resolved "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz#d3dd466211c73852741dfc842b7556dcbc2b0570"
+ integrity sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ==
+ dependencies:
+ chalk "^4.1.0"
+ consola "^2.15.3"
+ pretty-time "^1.1.0"
+ std-env "^3.0.1"
+
+websocket-driver@>=0.5.1, websocket-driver@^0.7.4:
+ version "0.7.4"
+ resolved "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760"
+ integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==
+ dependencies:
+ http-parser-js ">=0.5.1"
+ safe-buffer ">=5.1.0"
+ websocket-extensions ">=0.1.1"
+
+websocket-extensions@>=0.1.1:
+ version "0.1.4"
+ resolved "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42"
+ integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==
+
+whatwg-url@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"
+ integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==
+ dependencies:
+ tr46 "~0.0.3"
+ webidl-conversions "^3.0.0"
+
+which-boxed-primitive@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6"
+ integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==
+ dependencies:
+ is-bigint "^1.0.1"
+ is-boolean-object "^1.1.0"
+ is-number-object "^1.0.4"
+ is-string "^1.0.5"
+ is-symbol "^1.0.3"
+
+which@^1.3.1:
+ version "1.3.1"
+ resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
+ integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
+ dependencies:
+ isexe "^2.0.0"
+
+which@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
+ integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
+ dependencies:
+ isexe "^2.0.0"
+
+widest-line@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca"
+ integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==
+ dependencies:
+ string-width "^4.0.0"
+
+widest-line@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz#a0fc673aaba1ea6f0a0d35b3c2795c9a9cc2ebf2"
+ integrity sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==
+ dependencies:
+ string-width "^5.0.1"
+
+wildcard@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec"
+ integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==
+
+word-wrap@^1.2.3:
+ version "1.2.3"
+ resolved "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
+ integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==
+
+wrap-ansi@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
+ integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
+ dependencies:
+ ansi-styles "^4.0.0"
+ string-width "^4.1.0"
+ strip-ansi "^6.0.0"
+
+wrap-ansi@^8.0.1:
+ version "8.0.1"
+ resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.0.1.tgz#2101e861777fec527d0ea90c57c6b03aac56a5b3"
+ integrity sha512-QFF+ufAqhoYHvoHdajT/Po7KoXVBPXS2bgjIam5isfWJPfIOnQZ50JtUiVvCv/sjgacf3yRrt2ZKUZ/V4itN4g==
+ dependencies:
+ ansi-styles "^6.1.0"
+ string-width "^5.0.1"
+ strip-ansi "^7.0.1"
+
+wrappy@1:
+ version "1.0.2"
+ resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
+ integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
+
+write-file-atomic@^3.0.0:
+ version "3.0.3"
+ resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
+ integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
+ dependencies:
+ imurmurhash "^0.1.4"
+ is-typedarray "^1.0.0"
+ signal-exit "^3.0.2"
+ typedarray-to-buffer "^3.1.5"
+
+write-file-atomic@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.1.tgz#9faa33a964c1c85ff6f849b80b42a88c2c537c8f"
+ integrity sha512-nSKUxgAbyioruk6hU87QzVbY279oYT6uiwgDoujth2ju4mJ+TZau7SQBhtbTmUyuNYTuXnSyRn66FV0+eCgcrQ==
+ dependencies:
+ imurmurhash "^0.1.4"
+ signal-exit "^3.0.7"
+
+ws@^7.3.1:
+ version "7.5.9"
+ resolved "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
+ integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
+
+ws@^8.4.2:
+ version "8.8.1"
+ resolved "https://registry.npmjs.org/ws/-/ws-8.8.1.tgz#5dbad0feb7ade8ecc99b830c1d77c913d4955ff0"
+ integrity sha512-bGy2JzvzkPowEJV++hF07hAD6niYSr0JzBNo/J29WsB57A2r7Wlc1UFcTR9IzrPvuNVO4B8LGqF8qcpsVOhJCA==
+
+xdg-basedir@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
+ integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
+
+xml-js@^1.6.11:
+ version "1.6.11"
+ resolved "https://registry.npmjs.org/xml-js/-/xml-js-1.6.11.tgz#927d2f6947f7f1c19a316dd8eea3614e8b18f8e9"
+ integrity sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==
+ dependencies:
+ sax "^1.2.4"
+
+xtend@^4.0.0, xtend@^4.0.1:
+ version "4.0.2"
+ resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
+ integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
+
+yallist@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
+ integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
+
+yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2:
+ version "1.10.2"
+ resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b"
+ integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==
+
+yargs-parser@^20.2.3:
+ version "20.2.9"
+ resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee"
+ integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==
+
+yocto-queue@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
+ integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
+
+zwitch@^1.0.0:
+ version "1.0.5"
+ resolved "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz#d11d7381ffed16b742f6af7b3f223d5cd9fe9920"
+ integrity sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==